diff --git a/.github/workflows/core-typecheck.yml b/.github/workflows/core-typecheck.yml new file mode 100644 index 0000000000000..f9eb7c25058db --- /dev/null +++ b/.github/workflows/core-typecheck.yml @@ -0,0 +1,37 @@ +name: Core Typecheck + +on: + push: + branches: + - main + pull_request: + +env: + POETRY_VERSION: "1.8.3" + +jobs: + build: + runs-on: ubuntu-latest + strategy: + # You can use PyPy versions in python-version. + # For example, pypy-2.7 and pypy-3.8 + matrix: + python-version: ["3.9"] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: ${{ github.event_name == 'pull_request' && 2 || 0 }} + - name: Install Poetry + run: pipx install poetry==${{ env.POETRY_VERSION }} + - name: Set up python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: "poetry" + cache-dependency-path: "**/poetry.lock" + - name: Install core + shell: bash + run: cd ./llama-index-core/ && poetry install --with dev + - name: Run MyPy + shell: bash + run: cd ./llama-index-core/ && poetry run mypy llama_index diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 6a87c9bc9435b..dd0f0c1edd73e 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -9,6 +9,9 @@ build: os: ubuntu-22.04 tools: python: "3.12" + jobs: + pre_build: + - python docs/merge_llama_deploy_docs.py mkdocs: configuration: docs/mkdocs.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 39e082282f7bf..893926c056103 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,345 @@ # ChangeLog +## [2024-09-12] + +### `llama-index-core` [0.11.9] + +- Add callback manager to retriever query engine from args (#15990) +- Do not pass system prompt from fn calling runner to fn calling worker (#15986) +- fix: Error when parsing react output if tool name contains non-English characters (#15956) + +### `llama-index-embeddings-alibabacloud-aisearch` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + +### `llama-index-experimental` [0.3.1] + +- Add NUDGE Finetuning (#15954) + +### `llama-index-graph-stores-falkordb` [0.2.2] + +- update falkordb client (#15940) + +### `llama-index-llms-openai` [0.2.5] + +- Add support for o1 openai models (#15979) +- force temp to 1.0 for o1 (#15983) + +### `llama-index-node-parser-alibabacloud-aisearch` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + +### `llama-index-postprocessor-alibabacloud-aisearch-rerank` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + +### `llama-index-readers-alibabacloud-aisearch` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + +### `llama-index-vector-stores-opensearch` [0.3.0] + +- Differentiate sync and async calls in OpenSearchVectorClient (#15945) + +### `llama-index-vector-stores-postgres` [0.2.4] + +- fix attribute error in PGVectorStore (#15961) +- add support for engine parameters (#15951) + +### `llama-index-vector-stores-wordlift` [0.4.5] + +- Catch nest_asyncio errors (#15975) + +## [2024-09-09] + +### `llama-index-core` [0.11.8] + +- feat: Add a retry policy config to workflow steps (#15757) +- Add doc id to Langchain format conversions (#15928) + +### `llama-index-chat-store-dynamodb` [0.1.0] + +- Add DynamoDBChatStore (#15917) + +### `llama-index-cli` [0.3.1] + +- Fix RagCLI pydantic error (#15931) + +### 
`llama-index-llms-alibabacloud-aisearch` [0.1.0] + +- add llama-index llms alibabacloud_aisearch integration (#15850) + +### `llama-index-llms-mistralai` [0.2.3] + +- Make default mistral model support function calling with `large-latest` (#15906) + +### `llama-index-llms-vertex` [0.3.4] + +- Add InternalServerError to retry decorator (#15921) + +### `llama-index-postprocessor-rankllm-rerank` [0.3.0] + +- Update RankLLM with new rerankers (#15892) + +### `llama-index-vector-stores-azurecosmosnosql` [1.0.0] + +- Adding vector store for Azure Cosmos DB NoSql (#14158) + +### `llama-index-readers-microsoft-sharepoint` [0.3.1] + +- Fix error handling in sharepoint reader, fix error with download file (#15868) + +### `llama-index-vector-stores-wordlift` [0.4.4] + +- Adding support for MetadataFilters to WordLift Vector Store (#15905) + +### `llama-index-vector-stores-opensearch` [0.2.2] + +- Opensearch Serverless filtered query support using knn_score script (#15899) + +## [2024-09-06] + +### `llama-index-core` [0.11.7] + +- Make SentenceSplitter's secondary_chunking_regex optional (#15882) +- force openai structured output (#15706) +- fix assert error, add type ignore for streaming agents (#15887) +- Fix image document deserialization issue (#15857) + +### `llama-index-graph-stores-kuzu` [0.3.2] + +- Bug fix for KuzuPropertyGraphStore: Allow upserting relations even when chunks are absent (#15889) + +### `llama-index-llms-bedrock-converse` [0.3.0] + +- Removed unused llama-index-llms-anthropic dependency from Bedrock Converse (#15869) + +### `llama-index-vector-stores-postgres` [0.2.2] + +- Fix PGVectorStore with latest pydantic, update pydantic imports (#15886) + +### `llama-index-vector-stores-tablestore` [0.1.0] + +- Add TablestoreVectorStore (#15657) + +## [2024-09-05] + +### `llama-index-core` [0.11.6] + +- add llama-deploy docs to docs builds (#15794) +- Add oreilly course cookbooks (#15845) + +### `llama-index-readers-box` [0.2.1] + +- Various bug fixes (#15836) + +### `llama-index-readers-file` [0.2.1] + +- Update ImageReader file loading logic (#15848) + +### `llama-index-tools-box` [0.2.1] + +- Various bug fixes (#15836) + +### `llama-index-vector-stores-opensearch` [0.2.1] + +- Refresh Opensearch index after delete operation (#15854) + +## [2024-09-04] + +### `llama-index-core` [0.11.5] + +- remove unneeded assert in property graph retriever (#15832) +- make simple property graphs serialize again (#15833) +- fix json schema for fastapi return types on core components (#15816) + +### `llama-index-llms-nvidia` [0.2.2] + +- NVIDIA llm: Add Completion for starcoder models (#15802) + +### `llama-index-llms-ollama` [0.3.1] + +- add ollama response usage (#15773) + +### `llama-index-readers-dashscope` [0.2.1] + +- fix pydantic v2 validation errors (#15800) + +### `llama-index-readers-discord` [0.2.1] + +- fix: convert Document id from int to string in DiscordReader (#15806) + +### `llama-index-vector-stores-mariadb` [0.1.0] + +- Add MariaDB vector store integration package (#15564) + +## [2024-09-02] + +### `llama-index-core` [0.11.4] + +- Add mypy to core (#14883) +- Fix incorrect instrumentation fields/types (#15752) +- FunctionCallingAgent sources bug + light wrapper to create agent (#15783) +- Add text to sql advanced workflow nb (#15775) +- fix: remove context after streaming workflow to enable streaming again (#15776) +- Fix chat memory persisting and loading methods to use correct JSON format (#15545) +- Fix `_example_type` class var being read as private attr with Pydantic V2 
(#15758) + +### `llama-index-embeddings-litellm` [0.2.1] + +- add dimensions param to LiteLLMEmbedding, fix a bug that prevents reading vars from env (#15770) + +### `llama-index-embeddings-upstage` [0.2.1] + +- Bugfix upstage embedding when initializing the UpstageEmbedding class (#15767) + +### `llama-index-embeddings-sagemaker-endpoint` [0.2.2] + +- Fix Sagemaker Field required issue (#15778) + +### `llama-index-graph-stores-falkordb` [0.2.1] + +- fix relations upsert with special chars (#15769) + +### `llama-index-graph-stores-neo4j` [0.3.1] + +- Add native vector index support for neo4j lpg and fix vector filters (#15759) + +### `llama-index-llms-azure-inference` [0.2.2] + +- fix: GitHub Models metadata retrieval (#15747) + +### `llama-index-llms-bedrock` [0.2.1] + +- Update `base.py` to fix `self` issues (#15729) + +### `llama-index-llms-ollama` [0.3.1] + +- add ollama response usage (#15773) + +### `llama-index-llms-sagemaker-endpoint` [0.2.2] + +- Fix Sagemaker Field required issue (#15778) + +### `llama-index-multi-modal-llms-anthropic` [0.2.1] + +- Support image type detection without knowing the file name (#15763) + +### `llama-index-vector-stores-milvus` [0.2.2] + +- feat: implement get_nodes for MilvusVectorStore (#15696) + +### `llama-index-vector-stores-tencentvectordb` [0.2.1] + +- fix: tencentvectordb inconsistent attribute name (#15733) + +## [2024-08-29] + +### `llama-index-core` [0.11.3] + +- refact: merge Context and Session to simplify the workflows api (#15709) +- chore: stop using deprecated `ctx.data` in workflows docs (#15716) +- fix: stop streaming workflow events when a step raises (#15714) +- Fix llm_chat_callback for multimodal llms (#15700) +- chore: Increase unit tests coverage for the workflow package (#15691) +- fix SimpleVectorStore.from_persist_dir() behaviour (#15534) + +### `llama-index-embeddings-azure-openai` [0.2.5] + +- fix json serialization for azure embeddings (#15724) + +### `llama-index-graph-stores-kuzu` [0.3.0] + +- Add KuzuPropertyGraphStore (#15678) + +### `llama-index-indices-managed-vectara` [0.2.1] + +- added new User Defined Function reranker (#15546) + +### `llama-index-llms-mistralai` [0.2.2] + +- Fix `random_seed` type in mistral llm (#15701) + +### `llama-index-llms-nvidia` [0.2.1] + +- Add function/tool calling support to nvidia llm (#15359) + +### `llama-index-multi-modal-llms-ollama` [0.3.0] + +- bump ollama client deps for multimodal llm (#15702) + +### `llama-index-readers-web` [0.2.1] + +- Fix: Firecrawl scraping url response (#15720) + +### `llama-index-selectors-notdiamond` [0.1.0] + +- Adding Not Diamond to llama_index (#15703) + +### `llama-index-vector-stores-milvus` [0.2.3] + +- MMR in Milvus vector stores (#15634) +- feat: implement get_nodes for MilvusVectorStore (#15696) + +## [2024-08-27] + +### `llama-index-core` [0.11.2] + +- fix tool schemas generation for pydantic v2 to handle nested models (#15679) +- feat: support default values for nested workflows (#15660) +- feat: allow FunctionTool with just an async fn (#15638) +- feat: Allow streaming events from steps (#15488) +- fix auto-retriever pydantic indent error (#15648) +- Implement Router Query Engine example using workflows (#15635) +- Add multi step query engine example using workflows (#15438) +- start traces for llm-level operations (#15542) +- Pass callback_manager to init in CodeSplitter from_defaults (#15585) + +### `llama-index-embeddings-xinference` [0.1.0] + +- Add Xinference Embedding Class (#15579) + +### `llama-index-llms-ai21` [0.3.3] + +- 
Integrations: AI21 function calling Support (#15622) + +### `llama-index-llms-anthropic` [0.3.0] + +- Added support for anthropic models through GCP Vertex AI (#15661) + +### `llama-index-llms-cerebras` [0.1.0] + +- Implement Cerebras Integration (#15665) + +### `llama-index-postprocessor-nvidia-rerank` [0.3.1] + +- fix downloaded nim endpoint path (#15645) +- fix llama-index-postprocessor-nvidia-rerank tests (#15643) + +### `llama-index-postprocessor-xinference-rerank` [0.1.0] + +- add xinference rerank class (#15639) + +### `llama-index-vector-stores-alibabacloud-opensearch` [0.2.1] + +- fix set output fields in AlibabaCloudOpenSearchConfig (#15562) + +### `llama-index-vector-stores-azureaisearch` [0.2.1] + +- Upgrade azure-search-documents to 2024-07-01 GA API and Add Support for Scalar and Binary Quantization in Index Creation (#15650) + +### `llama-index-vector-stores-neo4j` [0.2.1] + +- Neo4j Vector Store: Make Embedding Dimension Check Optional (#15628) + +### `llama-index-vector-stores-milvus` [0.2.1] + +- Change the default consistency level of Milvus (#15577) + +### `llama-index-vector-stores-elasticsearch` [0.3.2] + +- Fix the ElasticsearchStore key error (#15631) + ## [2024-08-23] ### `llama-index-core` [0.11.1] diff --git a/docs/docs/CHANGELOG.md b/docs/docs/CHANGELOG.md index 39e082282f7bf..16186fd5ff033 100644 --- a/docs/docs/CHANGELOG.md +++ b/docs/docs/CHANGELOG.md @@ -1,5 +1,347 @@ # ChangeLog +## [2024-09-12] + +### `llama-index-core` [0.11.9] + +- Add callback manager to retriever query engine from args (#15990) +- Do not pass system prompt from fn calling runner to fn calling worker (#15986) +- fix: Error when parsing react output if tool name contains non-English characters (#15956) + +### `llama-index-embeddings-alibabacloud-aisearch` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + +### `llama-index-experimental` [0.3.1] + +- Add NUDGE Finetuning (#15954) + +### `llama-index-graph-stores-falkordb` [0.2.2] + +- update falkordb client (#15940) + +### `llama-index-llms-openai` [0.2.5] + +- Add support for o1 openai models (#15979) +- force temp to 1.0 for o1 (#15983) + +### `llama-index-node-parser-alibabacloud-aisearch` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + +### `llama-index-postprocessor-alibabacloud-aisearch-rerank` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + + +### `llama-index-readers-alibabacloud-aisearch` [0.1.0] + +- Add four alibabacloud-aisearch llama-index integrations: rerank, node_parser, readers, embeddings (#15934) + + +### `llama-index-vector-stores-opensearch` [0.3.0] + +- Differentiate sync and async calls in OpenSearchVectorClient (#15945) + +### `llama-index-vector-stores-postgres` [0.2.4] + +- fix attribute error in PGVectorStore (#15961) +- add support for engine parameters (#15951) + +### `llama-index-vector-stores-wordlift` [0.4.5] + +- Catch nest_asyncio errors (#15975) + +## [2024-09-09] + +### `llama-index-core` [0.11.8] + +- feat: Add a retry policy config to workflow steps (#15757) +- Add doc id to Langchain format conversions (#15928) + +### `llama-index-chat-store-dynamodb` [0.1.0] + +- Add DynamoDBChatStore (#15917) + +### `llama-index-cli` [0.3.1] + +- Fix RagCLI pydantic error (#15931) + +### `llama-index-llms-alibabacloud-aisearch` [0.1.0] + +- add llama-index llms alibabacloud_aisearch integration 
(#15850) + +### `llama-index-llms-mistralai` [0.2.3] + +- Make default mistral model support function calling with `large-latest` (#15906) + +### `llama-index-llms-vertex` [0.3.4] + +- Add InternalServerError to retry decorator (#15921) + +### `llama-index-postprocessor-rankllm-rerank` [0.3.0] + +- Update RankLLM with new rerankers (#15892) + +### `llama-index-vector-stores-azurecosmosnosql` [1.0.0] + +- Adding vector store for Azure Cosmos DB NoSql (#14158) + +### `llama-index-readers-microsoft-sharepoint` [0.3.1] + +- Fix error handling in sharepoint reader, fix error with download file (#15868) + +### `llama-index-vector-stores-wordlift` [0.4.4] + +- Adding support for MetadataFilters to WordLift Vector Store (#15905) + +### `llama-index-vector-stores-opensearch` [0.2.2] + +- Opensearch Serverless filtered query support using knn_score script (#15899) + +## [2024-09-06] + +### `llama-index-core` [0.11.7] + +- Make SentenceSplitter's secondary_chunking_regex optional (#15882) +- force openai structured output (#15706) +- fix assert error, add type ignore for streaming agents (#15887) +- Fix image document deserialization issue (#15857) + +### `llama-index-graph-stores-kuzu` [0.3.2] + +- Bug fix for KuzuPropertyGraphStore: Allow upserting relations even when chunks are absent (#15889) + +### `llama-index-llms-bedrock-converse` [0.3.0] + +- Removed unused llama-index-llms-anthropic dependency from Bedrock Converse (#15869) + +### `llama-index-vector-stores-postgres` [0.2.2] + +- Fix PGVectorStore with latest pydantic, update pydantic imports (#15886) + +### `llama-index-vector-stores-tablestore` [0.1.0] + +- Add TablestoreVectorStore (#15657) + +## [2024-09-05] + +### `llama-index-core` [0.11.6] + +- add llama-deploy docs to docs builds (#15794) +- Add oreilly course cookbooks (#15845) + +### `llama-index-readers-box` [0.2.1] + +- Various bug fixes (#15836) + +### `llama-index-readers-file` [0.2.1] + +- Update ImageReader file loading logic (#15848) + +### `llama-index-tools-box` [0.2.1] + +- Various bug fixes (#15836) + +### `llama-index-vector-stores-opensearch` [0.2.1] + +- Refresh Opensearch index after delete operation (#15854) + +## [2024-09-04] + +### `llama-index-core` [0.11.5] + +- remove unneeded assert in property graph retriever (#15832) +- make simple property graphs serialize again (#15833) +- fix json schema for fastapi return types on core components (#15816) + +### `llama-index-llms-nvidia` [0.2.2] + +- NVIDIA llm: Add Completion for starcoder models (#15802) + +### `llama-index-llms-ollama` [0.3.1] + +- add ollama response usage (#15773) + +### `llama-index-readers-dashscope` [0.2.1] + +- fix pydantic v2 validation errors (#15800) + +### `llama-index-readers-discord` [0.2.1] + +- fix: convert Document id from int to string in DiscordReader (#15806) + +### `llama-index-vector-stores-mariadb` [0.1.0] + +- Add MariaDB vector store integration package (#15564) + +## [2024-09-02] + +### `llama-index-core` [0.11.4] + +- Add mypy to core (#14883) +- Fix incorrect instrumentation fields/types (#15752) +- FunctionCallingAgent sources bug + light wrapper to create agent (#15783) +- Add text to sql advanced workflow nb (#15775) +- fix: remove context after streaming workflow to enable streaming again (#15776) +- Fix chat memory persisting and loading methods to use correct JSON format (#15545) +- Fix `_example_type` class var being read as private attr with Pydantic V2 (#15758) + +### `llama-index-embeddings-litellm` [0.2.1] + +- add dimensions param to LiteLLMEmbedding, fix a bug 
that prevents reading vars from env (#15770) + +### `llama-index-embeddings-upstage` [0.2.1] + +- Bugfix upstage embedding when initializing the UpstageEmbedding class (#15767) + +### `llama-index-embeddings-sagemaker-endpoint` [0.2.2] + +- Fix Sagemaker Field required issue (#15778) + +### `llama-index-graph-stores-falkordb` [0.2.1] + +- fix relations upsert with special chars (#15769) + +### `llama-index-graph-stores-neo4j` [0.3.1] + +- Add native vector index support for neo4j lpg and fix vector filters (#15759) + +### `llama-index-llms-azure-inference` [0.2.2] + +- fix: GitHub Models metadata retrieval (#15747) + +### `llama-index-llms-bedrock` [0.2.1] + +- Update `base.py` to fix `self` issues (#15729) + +### `llama-index-llms-ollama` [0.3.1] + +- add ollama response usage (#15773) + +### `llama-index-llms-sagemaker-endpoint` [0.2.2] + +- Fix Sagemaker Field required issue (#15778) + +### `llama-index-multi-modal-llms-anthropic` [0.2.1] + +- Support image type detection without knowing the file name (#15763) + +### `llama-index-vector-stores-milvus` [0.2.2] + +- feat: implement get_nodes for MilvusVectorStore (#15696) + +### `llama-index-vector-stores-tencentvectordb` [0.2.1] + +- fix: tencentvectordb inconsistent attribute name (#15733) + +## [2024-08-29] + +### `llama-index-core` [0.11.3] + +- refact: merge Context and Session to simplify the workflows api (#15709) +- chore: stop using deprecated `ctx.data` in workflows docs (#15716) +- fix: stop streaming workflow events when a step raises (#15714) +- Fix llm_chat_callback for multimodal llms (#15700) +- chore: Increase unit tests coverage for the workflow package (#15691) +- fix SimpleVectorStore.from_persist_dir() behaviour (#15534) + +### `llama-index-embeddings-azure-openai` [0.2.5] + +- fix json serialization for azure embeddings (#15724) + +### `llama-index-graph-stores-kuzu` [0.3.0] + +- Add KuzuPropertyGraphStore (#15678) + +### `llama-index-indices-managed-vectara` [0.2.1] + +- added new User Defined Function reranker (#15546) + +### `llama-index-llms-mistralai` [0.2.2] + +- Fix `random_seed` type in mistral llm (#15701) + +### `llama-index-llms-nvidia` [0.2.1] + +- Add function/tool calling support to nvidia llm (#15359) + +### `llama-index-multi-modal-llms-ollama` [0.3.0] + +- bump ollama client deps for multimodal llm (#15702) + +### `llama-index-readers-web` [0.2.1] + +- Fix: Firecrawl scraping url response (#15720) + +### `llama-index-selectors-notdiamond` [0.1.0] + +- Adding Not Diamond to llama_index (#15703) + +### `llama-index-vector-stores-milvus` [0.2.3] + +- MMR in Milvus vector stores (#15634) +- feat: implement get_nodes for MilvusVectorStore (#15696) + +## [2024-08-27] + +### `llama-index-core` [0.11.2] + +- fix tool schemas generation for pydantic v2 to handle nested models (#15679) +- feat: support default values for nested workflows (#15660) +- feat: allow FunctionTool with just an async fn (#15638) +- feat: Allow streaming events from steps (#15488) +- fix auto-retriever pydantic indent error (#15648) +- Implement Router Query Engine example using workflows (#15635) +- Add multi step query engine example using workflows (#15438) +- start traces for llm-level operations (#15542) +- Pass callback_manager to init in CodeSplitter from_defaults (#15585) + +### `llama-index-embeddings-xinference` [0.1.0] + +- Add Xinference Embedding Class (#15579) + +### `llama-index-llms-ai21` [0.3.3] + +- Integrations: AI21 function calling Support (#15622) + +### `llama-index-llms-anthropic` [0.3.0] + +- Added support for 
anthropic models through GCP Vertex AI (#15661) + +### `llama-index-llms-cerebras` [0.1.0] + +- Implement Cerebras Integration (#15665) + +### `llama-index-postprocessor-nvidia-rerank` [0.3.1] + +- fix downloaded nim endpoint path (#15645) +- fix llama-index-postprocessor-nvidia-rerank tests (#15643) + +### `llama-index-postprocessor-xinference-rerank` [0.1.0] + +- add xinference rerank class (#15639) + +### `llama-index-vector-stores-alibabacloud-opensearch` [0.2.1] + +- fix set output fields in AlibabaCloudOpenSearchConfig (#15562) + +### `llama-index-vector-stores-azureaisearch` [0.2.1] + +- Upgrade azure-search-documents to 2024-07-01 GA API and Add Support for Scalar and Binary Quantization in Index Creation (#15650) + +### `llama-index-vector-stores-neo4j` [0.2.1] + +- Neo4j Vector Store: Make Embedding Dimension Check Optional (#15628) + +### `llama-index-vector-stores-milvus` [0.2.1] + +- Change the default consistency level of Milvus (#15577) + +### `llama-index-vector-stores-elasticsearch` [0.3.2] + +- Fix the ElasticsearchStore key error (#15631) + ## [2024-08-23] ### `llama-index-core` [0.11.1] diff --git a/docs/docs/_static/integrations/opik.png b/docs/docs/_static/integrations/opik.png new file mode 100644 index 0000000000000..fd950a0e93735 Binary files /dev/null and b/docs/docs/_static/integrations/opik.png differ diff --git a/docs/docs/agents/agent_step_execute.png b/docs/docs/agents/agent_step_execute.png deleted file mode 100644 index 0008b41dc4376..0000000000000 Binary files a/docs/docs/agents/agent_step_execute.png and /dev/null differ diff --git a/docs/docs/api_reference/callbacks/opik.md b/docs/docs/api_reference/callbacks/opik.md new file mode 100644 index 0000000000000..8b3b87fce25c0 --- /dev/null +++ b/docs/docs/api_reference/callbacks/opik.md @@ -0,0 +1,4 @@ +::: llama_index.callbacks.opik + options: + members: + - OpikCallbackHandler diff --git a/docs/docs/api_reference/embeddings/alibabacloud_aisearch.md b/docs/docs/api_reference/embeddings/alibabacloud_aisearch.md new file mode 100644 index 0000000000000..039e8ce7465e5 --- /dev/null +++ b/docs/docs/api_reference/embeddings/alibabacloud_aisearch.md @@ -0,0 +1,4 @@ +::: llama_index.embeddings.alibabacloud_aisearch + options: + members: + - AlibabaCloudAISearchEmbedding diff --git a/docs/docs/api_reference/embeddings/xinference.md b/docs/docs/api_reference/embeddings/xinference.md new file mode 100644 index 0000000000000..69b2d3ef385cd --- /dev/null +++ b/docs/docs/api_reference/embeddings/xinference.md @@ -0,0 +1,4 @@ +::: llama_index.embeddings.xinference + options: + members: + - XinferenceEmbedding diff --git a/docs/docs/api_reference/llms/alibabacloud_aisearch.md b/docs/docs/api_reference/llms/alibabacloud_aisearch.md new file mode 100644 index 0000000000000..99d9c86f18553 --- /dev/null +++ b/docs/docs/api_reference/llms/alibabacloud_aisearch.md @@ -0,0 +1,4 @@ +::: llama_index.llms.alibabacloud_aisearch + options: + members: + - AlibabaCloudAISearchLLM diff --git a/docs/docs/api_reference/node_parser/alibabacloud_aisearch.md b/docs/docs/api_reference/node_parser/alibabacloud_aisearch.md new file mode 100644 index 0000000000000..4f8f33f8f2385 --- /dev/null +++ b/docs/docs/api_reference/node_parser/alibabacloud_aisearch.md @@ -0,0 +1,4 @@ +::: llama_index.node_parser.alibabacloud_aisearch + options: + members: + - AlibabaCloudAISearchNodeParser diff --git a/docs/docs/api_reference/postprocessor/alibabacloud_aisearch_rerank.md b/docs/docs/api_reference/postprocessor/alibabacloud_aisearch_rerank.md new file mode 
100644 index 0000000000000..855a7e783ed60 --- /dev/null +++ b/docs/docs/api_reference/postprocessor/alibabacloud_aisearch_rerank.md @@ -0,0 +1,4 @@ +::: llama_index.postprocessor.alibabacloud_aisearch_rerank + options: + members: + - AlibabaCloudAISearchRerank diff --git a/docs/docs/api_reference/postprocessor/xinference_rerank.md b/docs/docs/api_reference/postprocessor/xinference_rerank.md new file mode 100644 index 0000000000000..11d71202e448f --- /dev/null +++ b/docs/docs/api_reference/postprocessor/xinference_rerank.md @@ -0,0 +1,4 @@ +::: llama_index.postprocessor.xinference_rerank + options: + members: + - XinferenceRerank diff --git a/docs/docs/api_reference/readers/alibabacloud_aisearch.md b/docs/docs/api_reference/readers/alibabacloud_aisearch.md new file mode 100644 index 0000000000000..3b3c2e5ed6567 --- /dev/null +++ b/docs/docs/api_reference/readers/alibabacloud_aisearch.md @@ -0,0 +1,5 @@ +::: llama_index.readers.alibabacloud_aisearch + options: + members: + - AlibabaCloudAISearchDocumentReader + - AlibabaCloudAISearchImageReader diff --git a/docs/docs/api_reference/selectors/notdiamond.md b/docs/docs/api_reference/selectors/notdiamond.md new file mode 100644 index 0000000000000..fd61391b877ba --- /dev/null +++ b/docs/docs/api_reference/selectors/notdiamond.md @@ -0,0 +1,4 @@ +::: llama_index.selectors.notdiamond + options: + members: + - NotDiamondSelector diff --git a/docs/docs/api_reference/storage/chat_store/dynamodb.md b/docs/docs/api_reference/storage/chat_store/dynamodb.md new file mode 100644 index 0000000000000..2662a45d41dfa --- /dev/null +++ b/docs/docs/api_reference/storage/chat_store/dynamodb.md @@ -0,0 +1,4 @@ +::: llama_index.storage.chat_store.dynamodb + options: + members: + - DynamoDBChatStore diff --git a/docs/docs/api_reference/storage/graph_stores/falkordb.md b/docs/docs/api_reference/storage/graph_stores/falkordb.md index 12906a65f9a48..d7ab23ee8c1a9 100644 --- a/docs/docs/api_reference/storage/graph_stores/falkordb.md +++ b/docs/docs/api_reference/storage/graph_stores/falkordb.md @@ -2,3 +2,4 @@ options: members: - FalkorDBGraphStore + - FalkorDBPropertyGraphStore diff --git a/docs/docs/api_reference/storage/graph_stores/kuzu.md b/docs/docs/api_reference/storage/graph_stores/kuzu.md index a19d228e43df1..e1a6deeff08ea 100644 --- a/docs/docs/api_reference/storage/graph_stores/kuzu.md +++ b/docs/docs/api_reference/storage/graph_stores/kuzu.md @@ -2,3 +2,4 @@ options: members: - KuzuGraphStore + - KuzuPropertyGraphStore diff --git a/docs/docs/api_reference/storage/vector_store/azurecosmosnosql.md b/docs/docs/api_reference/storage/vector_store/azurecosmosnosql.md new file mode 100644 index 0000000000000..39562fb196cfb --- /dev/null +++ b/docs/docs/api_reference/storage/vector_store/azurecosmosnosql.md @@ -0,0 +1,4 @@ +::: llama_index.vector_stores.azurecosmosnosql + options: + members: + - AzureCosmosDBMongoDBVectorSearch diff --git a/docs/docs/api_reference/storage/vector_store/mariadb.md b/docs/docs/api_reference/storage/vector_store/mariadb.md new file mode 100644 index 0000000000000..a6c481a830ae2 --- /dev/null +++ b/docs/docs/api_reference/storage/vector_store/mariadb.md @@ -0,0 +1,4 @@ +::: llama_index.vector_stores.mariadb + options: + members: + - MariaDBVectorStore diff --git a/docs/docs/api_reference/storage/vector_store/tablestore.md b/docs/docs/api_reference/storage/vector_store/tablestore.md new file mode 100644 index 0000000000000..d74970ab7ce48 --- /dev/null +++ b/docs/docs/api_reference/storage/vector_store/tablestore.md @@ -0,0 +1,4 @@ +::: 
llama_index.vector_stores.tablestore + options: + members: + - TablestoreVectorStore diff --git a/docs/docs/api_reference/workflow/retry_policy.md b/docs/docs/api_reference/workflow/retry_policy.md new file mode 100644 index 0000000000000..755128224ab12 --- /dev/null +++ b/docs/docs/api_reference/workflow/retry_policy.md @@ -0,0 +1,5 @@ +::: llama_index.core.workflow.retry_policy + options: + members: + - RetryPolicy + - ConstantDelayRetryPolicy diff --git a/docs/docs/community/integrations.md b/docs/docs/community/integrations.md index 5a72cb856d480..9fdfcd31bad7e 100644 --- a/docs/docs/community/integrations.md +++ b/docs/docs/community/integrations.md @@ -22,7 +22,7 @@ We support [a huge number of LLMs](../module_guides/models/llms/modules.md). ## Observability/Tracing/Evaluation -Check out our [one-click observability](../module_guides/observability/observability.md) page +Check out our [one-click observability](../module_guides/observability/index.md) page for full tracing integrations. ## Structured Outputs diff --git a/docs/docs/community/integrations/graph_stores.md b/docs/docs/community/integrations/graph_stores.md index 5404ce4620cc5..659e6aae43d8a 100644 --- a/docs/docs/community/integrations/graph_stores.md +++ b/docs/docs/community/integrations/graph_stores.md @@ -21,11 +21,14 @@ See the associated guides below: ## `KuzuGraphStore` -We support a `KuzuGraphStore` integration, for persisting graphs directly in [Kuzu](https://kuzudb.com). +We support a `KuzuGraphStore` integration for persisting triples directly in [Kuzu](https://kuzudb.com). +Additionally, we support the `PropertyGraphIndex`, which allows you to store and query property graphs +using a Kuzu backend. See the associated guides below: - [Kuzu Graph Store](../../examples/index_structs/knowledge_graph/KuzuGraphDemo.ipynb) +- [Kuzu Property Graph Store](../../examples/property_graph/property_graph_kuzu.ipynb) ## `FalkorDBGraphStore` diff --git a/docs/docs/community/integrations/vector_stores.md b/docs/docs/community/integrations/vector_stores.md index 33e48c1eebf15..72d480137e049 100644 --- a/docs/docs/community/integrations/vector_stores.md +++ b/docs/docs/community/integrations/vector_stores.md @@ -28,6 +28,7 @@ as the storage backend for `VectorStoreIndex`. - txtai (`TxtaiVectorStore`). [Installation](https://neuml.github.io/txtai/install/). - Jaguar (`JaguarVectorStore`). [Installation](http://www.jaguardb.com/docsetup.html). - Lantern (`LanternVectorStore`). [Quickstart](https://docs.lantern.dev/get-started/overview). +- MariaDB (`MariaDBVectorStore`). [MariaDB Vector Overview](https://mariadb.com/kb/en/vector-overview/) - Milvus (`MilvusVectorStore`). [Installation](https://milvus.io/docs) - MongoDB Atlas (`MongoDBAtlasVectorSearch`). [Installation/Quickstart](https://www.mongodb.com/atlas/database). - MyScale (`MyScaleVectorStore`). [Quickstart](https://docs.myscale.com/en/quickstart/). [Installation/Python Client](https://docs.myscale.com/en/python-client/). @@ -38,6 +39,7 @@ as the storage backend for `VectorStoreIndex`. - Redis (`RedisVectorStore`). [Installation](https://redis.io/docs/latest/operate/oss_and_stack/install/install-stack/). - Relyt (`RelytVectorStore`). [Quickstart](https://docs.relyt.cn/docs/vector-engine/). - Supabase (`SupabaseVectorStore`). [Quickstart](https://supabase.github.io/vecs/api/). +- Tablestore (`TablestoreVectorStore`). [Installation](https://www.aliyun.com/product/ots). - TiDB (`TiDBVectorStore`). [Quickstart](../../examples/vector_stores/TiDBVector.ipynb). [Installation](https://tidb.cloud/ai). 
[Python Client](https://github.com/pingcap/tidb-vector-python). - TimeScale (`TimescaleVectorStore`). [Installation](https://github.com/timescale/python-vector). - Upstash (`UpstashVectorStore`). [Quickstart](https://upstash.com/docs/vector/overall/getstarted) @@ -483,6 +485,22 @@ vector_store.logout() **Note**: Client(requires jaguardb-http-client) <--> Http Gateway <--> JaguarDB Server Client side needs to run: "pip install -U jaguardb-http-client" +**MariaDB** + +```python +from llama_index.vector_stores.mariadb import MariaDBVectorStore + +vector_store = MariaDBVectorStore.from_params( + host="localhost", + port=3306, + user="llamaindex", + password="password", + database="vectordb", + table_name="llama_index_vectorstore", + embed_dim=1536, # OpenAI embedding dimension +) +``` + **Milvus** - Milvus Index offers the ability to store both Documents and their embeddings. @@ -644,6 +662,38 @@ vector_store = SingleStoreVectorStore( ) ``` +**Tablestore** + +```python +import os +import tablestore +from llama_index.vector_stores.tablestore import TablestoreVectorStore + +vector_store = TablestoreVectorStore( + endpoint=os.getenv("end_point"), + instance_name=os.getenv("instance_name"), + access_key_id=os.getenv("access_key_id"), + access_key_secret=os.getenv("access_key_secret"), + vector_dimension=512, + vector_metric_type=tablestore.VectorMetricType.VM_COSINE, + # metadata mapping is used to filter non-vector fields. + metadata_mappings=[ + tablestore.FieldSchema( + "type", + tablestore.FieldType.KEYWORD, + index=True, + enable_sort_and_agg=True, + ), + tablestore.FieldSchema( + "time", + tablestore.FieldType.LONG, + index=True, + enable_sort_and_agg=True, + ), + ], +) +``` + **TiDB** ```python @@ -900,6 +950,7 @@ documents = reader.load_data( - [Rockset](../../examples/vector_stores/RocksetIndexDemo.ipynb) - [Simple](../../examples/vector_stores/SimpleIndexDemo.ipynb) - [Supabase](../../examples/vector_stores/SupabaseVectorIndexDemo.ipynb) +- [Tablestore](../../examples/vector_stores/TablestoreDemo.ipynb) - [Tair](../../examples/vector_stores/TairIndexDemo.ipynb) - [Tencent](../../examples/vector_stores/TencentVectorDBIndexDemo.ipynb) - [Timesacle](../../examples/vector_stores/Timescalevector.ipynb) diff --git a/docs/docs/composability/diagram.png b/docs/docs/composability/diagram.png deleted file mode 100644 index 9734b399757d0..0000000000000 Binary files a/docs/docs/composability/diagram.png and /dev/null differ diff --git a/docs/docs/composability/diagram_b0.png b/docs/docs/composability/diagram_b0.png deleted file mode 100644 index 0e080daaac368..0000000000000 Binary files a/docs/docs/composability/diagram_b0.png and /dev/null differ diff --git a/docs/docs/composability/diagram_b1.png b/docs/docs/composability/diagram_b1.png deleted file mode 100644 index 31a6d3055ff62..0000000000000 Binary files a/docs/docs/composability/diagram_b1.png and /dev/null differ diff --git a/docs/docs/composability/diagram_q1.png b/docs/docs/composability/diagram_q1.png deleted file mode 100644 index feb6cbb723f8c..0000000000000 Binary files a/docs/docs/composability/diagram_q1.png and /dev/null differ diff --git a/docs/docs/composability/diagram_q2.png b/docs/docs/composability/diagram_q2.png deleted file mode 100644 index f89af4216df9e..0000000000000 Binary files a/docs/docs/composability/diagram_q2.png and /dev/null differ diff --git a/docs/docs/contribution/contrib.png b/docs/docs/contribution/contrib.png deleted file mode 100644 index ebb70d844aeff..0000000000000 Binary files 
a/docs/docs/contribution/contrib.png and /dev/null differ diff --git a/docs/docs/data_connectors/llamahub.png b/docs/docs/data_connectors/llamahub.png deleted file mode 100644 index db2b01117da9c..0000000000000 Binary files a/docs/docs/data_connectors/llamahub.png and /dev/null differ diff --git a/docs/docs/embeddings/doc_example.jpeg b/docs/docs/embeddings/doc_example.jpeg deleted file mode 100644 index 81549c900c0c6..0000000000000 Binary files a/docs/docs/embeddings/doc_example.jpeg and /dev/null differ diff --git a/docs/docs/evaluation/eval_query_response_context.png b/docs/docs/evaluation/eval_query_response_context.png deleted file mode 100644 index d7a633cdfb2a9..0000000000000 Binary files a/docs/docs/evaluation/eval_query_response_context.png and /dev/null differ diff --git a/docs/docs/evaluation/eval_query_sources.png b/docs/docs/evaluation/eval_query_sources.png deleted file mode 100644 index d6f4b222c5131..0000000000000 Binary files a/docs/docs/evaluation/eval_query_sources.png and /dev/null differ diff --git a/docs/docs/evaluation/eval_response_context.png b/docs/docs/evaluation/eval_response_context.png deleted file mode 100644 index c3800d44475b1..0000000000000 Binary files a/docs/docs/evaluation/eval_response_context.png and /dev/null differ diff --git a/docs/docs/examples/agent/agentic_rag_using_vertex_ai.ipynb b/docs/docs/examples/agent/agentic_rag_using_vertex_ai.ipynb index ba71c173741d0..0dc732b96913f 100644 --- a/docs/docs/examples/agent/agentic_rag_using_vertex_ai.ipynb +++ b/docs/docs/examples/agent/agentic_rag_using_vertex_ai.ipynb @@ -439,8 +439,7 @@ "from llama_index.core.vector_stores import MetadataFilters\n", "from pathlib import Path\n", "\n", - "from llama_index.core.agent import FunctionCallingAgentWorker\n", - "from llama_index.core.agent import AgentRunner" + "from llama_index.core.agent import FunctionCallingAgent" ] }, { @@ -936,11 +935,10 @@ "metadata": {}, "outputs": [], "source": [ - "# Create Agent Runner\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "# Create Agent\n", + "agent = FunctionCallingAgent.from_tools(\n", " [vector_query_tool, summary_tool], llm=vertex_gemini, verbose=True\n", - ")\n", - "agent = AgentRunner(agent_worker)" + ")" ] }, { @@ -1035,7 +1033,7 @@ "metadata": {}, "outputs": [], "source": [ - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " tool_retriever=obj_retriever,\n", " llm=vertex_gemini,\n", " system_prompt=\"\"\" \\\n", @@ -1044,8 +1042,7 @@ "\n", "\"\"\",\n", " verbose=True,\n", - ")\n", - "agent = AgentRunner(agent_worker)" + ")" ] }, { diff --git a/docs/docs/examples/agent/agentic_rag_with_llamaindex_and_vertexai_managed_index.ipynb b/docs/docs/examples/agent/agentic_rag_with_llamaindex_and_vertexai_managed_index.ipynb index 45aa3b6f26192..1f34579c77ceb 100644 --- a/docs/docs/examples/agent/agentic_rag_with_llamaindex_and_vertexai_managed_index.ipynb +++ b/docs/docs/examples/agent/agentic_rag_with_llamaindex_and_vertexai_managed_index.ipynb @@ -157,8 +157,7 @@ "from llama_index.core.vector_stores import MetadataFilters\n", "from pathlib import Path\n", "\n", - "from llama_index.core.agent import FunctionCallingAgentWorker\n", - "from llama_index.core.agent import AgentRunner" + "from llama_index.core.agent import FunctionCallingAgent" ] }, { @@ -829,11 +828,10 @@ "metadata": {}, "outputs": [], "source": [ - "# Create Agent Runner\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "# Create Agent\n", + "agent = 
FunctionCallingAgent.from_tools(\n", " [vector_query_tool, summary_tool], llm=vertex_gemini, verbose=True\n", - ")\n", - "agent = AgentRunner(agent_worker)" + ")" ] }, { @@ -927,7 +925,7 @@ "metadata": {}, "outputs": [], "source": [ - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " tool_retriever=obj_retriever,\n", " llm=vertex_gemini,\n", " system_prompt=\"\"\" \\\n", @@ -936,8 +934,7 @@ "\n", "\"\"\",\n", " verbose=True,\n", - ")\n", - "agent = AgentRunner(agent_worker)" + ")" ] }, { diff --git a/docs/docs/examples/agent/anthropic_agent.ipynb b/docs/docs/examples/agent/anthropic_agent.ipynb index 3906c27336efe..477def98ad382 100644 --- a/docs/docs/examples/agent/anthropic_agent.ipynb +++ b/docs/docs/examples/agent/anthropic_agent.ipynb @@ -170,15 +170,14 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [multiply_tool, add_tool],\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=False,\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { @@ -265,13 +264,12 @@ ], "source": [ "# enable parallel function calling\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [multiply_tool, add_tool],\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=True,\n", ")\n", - "agent = agent_worker.as_agent()\n", "response = await agent.achat(\"What is (121 * 3) + (5 * 8)?\")\n", "print(str(response))" ] @@ -358,12 +356,11 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [query_engine_tool], llm=llm, verbose=True\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { diff --git a/docs/docs/examples/agent/bedrock_converse_agent.ipynb b/docs/docs/examples/agent/bedrock_converse_agent.ipynb index de42a55ccda73..9f2f8b65f3435 100644 --- a/docs/docs/examples/agent/bedrock_converse_agent.ipynb +++ b/docs/docs/examples/agent/bedrock_converse_agent.ipynb @@ -175,15 +175,14 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [multiply_tool, add_tool],\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=False,\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { @@ -287,12 +286,11 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [query_engine_tool], llm=llm, verbose=True\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { diff --git a/docs/docs/examples/agent/lats_agent.ipynb b/docs/docs/examples/agent/lats_agent.ipynb index b4059435edcb3..2dd3a23de8644 100644 --- a/docs/docs/examples/agent/lats_agent.ipynb +++ b/docs/docs/examples/agent/lats_agent.ipynb 
@@ -7,7 +7,7 @@ "source": [ "# Language Agent Tree Search\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "[LATS (Language Agent Tree Search)](https://arxiv.org/pdf/2310.04406.pdf) by Zhou et al. combines LLM capabilities in planning, acting, and reasoning within a Monte Carlo tree search framework, allowing for deliberate and adaptive problem-solving guided by external feedback and self-reflection.\n", "\n", diff --git a/docs/docs/examples/agent/memory/composable_memory.ipynb b/docs/docs/examples/agent/memory/composable_memory.ipynb index 144e9fdfa395c..9710b498da7c4 100644 --- a/docs/docs/examples/agent/memory/composable_memory.ipynb +++ b/docs/docs/examples/agent/memory/composable_memory.ipynb @@ -510,7 +510,7 @@ "source": [ "from llama_index.llms.openai import OpenAI\n", "from llama_index.core.tools import FunctionTool\n", - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", "import nest_asyncio\n", "\n", @@ -583,10 +583,12 @@ "outputs": [], "source": [ "llm = OpenAI(model=\"gpt-3.5-turbo-0613\")\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", - " [multiply_tool, mystery_tool], llm=llm, verbose=True\n", - ")\n", - "agent = agent_worker.as_agent(memory=composable_memory)" + "agent = FunctionCallingAgent.from_tools(\n", + " [multiply_tool, mystery_tool],\n", + " llm=llm,\n", + " memory=composable_memory,\n", + " verbose=True,\n", + ")" ] }, { @@ -685,10 +687,9 @@ "outputs": [], "source": [ "llm = OpenAI(model=\"gpt-3.5-turbo-0613\")\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent_without_memory = FunctionCallingAgent.from_tools(\n", " [multiply_tool, mystery_tool], llm=llm, verbose=True\n", - ")\n", - "agent_without_memory = agent_worker.as_agent()" + ")" ] }, { @@ -737,9 +738,7 @@ "outputs": [], "source": [ "llm = OpenAI(model=\"gpt-3.5-turbo-0613\")\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", - " [multiply_tool, mystery_tool], llm=llm, verbose=True\n", - ")\n", + "\n", "composable_memory = SimpleComposableMemory.from_defaults(\n", " primary_memory=ChatMemoryBuffer.from_defaults(),\n", " secondary_memory_sources=[\n", @@ -749,7 +748,13 @@ " # later will use original vector_memory again\n", " ],\n", ")\n", - "agent_with_memory = agent_worker.as_agent(memory=composable_memory)" + "\n", + "agent_with_memory = FunctionCallingAgent.from_tools(\n", + " [multiply_tool, mystery_tool],\n", + " llm=llm,\n", + " memory=composable_memory,\n", + " verbose=True,\n", + ")" ] }, { @@ -937,9 +942,9 @@ ], "metadata": { "kernelspec": { - "display_name": "llama-index-core", + "display_name": "llama-index-caVs7DDe-py3.11", "language": "python", - "name": "llama-index-core" + "name": "python3" }, "language_info": { "codemirror_mode": { diff --git a/docs/docs/examples/agent/mistral_agent.ipynb b/docs/docs/examples/agent/mistral_agent.ipynb index d3da7876f3345..d69a729336767 100644 --- a/docs/docs/examples/agent/mistral_agent.ipynb +++ b/docs/docs/examples/agent/mistral_agent.ipynb @@ -172,15 +172,14 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [multiply_tool, add_tool],\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=False,\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { @@ -267,13 +266,13 
@@ ], "source": [ "# enable parallel function calling\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [multiply_tool, add_tool],\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=True,\n", ")\n", - "agent = agent_worker.as_agent()\n", + "\n", "response = await agent.achat(\"What is (121 * 3) + (5 * 8)?\")\n", "print(str(response))" ] @@ -360,12 +359,11 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [query_engine_tool], llm=llm, verbose=True\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { diff --git a/docs/docs/examples/agent/nvidia_agent.ipynb b/docs/docs/examples/agent/nvidia_agent.ipynb new file mode 100644 index 0000000000000..d7711bc4538c1 --- /dev/null +++ b/docs/docs/examples/agent/nvidia_agent.ipynb @@ -0,0 +1,734 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Function Calling NVIDIA Agent" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook shows you how to use our NVIDIA agent, powered by function calling capabilities." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Initial Setup " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's start by importing some simple building blocks. \n", + "\n", + "The main things we need are:\n", + "1. the NVIDIA NIM Endpoint (using our own `llama_index` LLM class)\n", + "2. a place to keep conversation history \n", + "3. a definition for tools that our agent can use." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33mDEPRECATION: pytest-httpx 0.21.0 has a non-standard dependency specifier pytest<8.*,>=6.*. pip 24.1 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytest-httpx or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n", + "\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install --upgrade --quiet llama-index-llms-nvidia" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Valid NVIDIA_API_KEY already in environment. Delete to reset\n" + ] + } + ], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "# del os.environ['NVIDIA_API_KEY'] ## delete key and reset\n", + "if os.environ.get(\"NVIDIA_API_KEY\", \"\").startswith(\"nvapi-\"):\n", + " print(\"Valid NVIDIA_API_KEY already in environment. 
Delete to reset\")\n", + "else:\n", + " nvapi_key = getpass.getpass(\"NVAPI Key (starts with nvapi-): \")\n", + " assert nvapi_key.startswith(\n", + " \"nvapi-\"\n", + " ), f\"{nvapi_key[:5]}... is not a valid key\"\n", + " os.environ[\"NVIDIA_API_KEY\"] = nvapi_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.nvidia import NVIDIA\n", + "from llama_index.core.tools import FunctionTool\n", + "from llama_index.embeddings.nvidia import NVIDIAEmbedding" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's define some very simple calculator tools for our agent." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def multiply(a: int, b: int) -> int:\n", + " \"\"\"Multiply two integers and return the result integer\"\"\"\n", + " return a * b\n", + "\n", + "\n", + "multiply_tool = FunctionTool.from_defaults(fn=multiply)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def add(a: int, b: int) -> int:\n", + " \"\"\"Add two integers and return the result integer\"\"\"\n", + " return a + b\n", + "\n", + "\n", + "add_tool = FunctionTool.from_defaults(fn=add)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we initialize a simple NVIDIA agent with calculator functions." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "llm = NVIDIA(\"meta/llama-3.1-70b-instruct\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.agent import FunctionCallingAgent\n", + "\n", + "agent = FunctionCallingAgent.from_tools(\n", + " [multiply_tool, add_tool],\n", + " llm=llm,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Chat" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: What is (121 * 3) + 42?\n", + "=== Calling Function ===\n", + "Calling function: multiply with args: {\"a\": 121, \"b\": 3}\n", + "=== Function Output ===\n", + "363\n", + "=== Calling Function ===\n", + "Calling function: add with args: {\"a\": 363, \"b\": 42}\n", + "=== Function Output ===\n", + "405\n", + "=== LLM Response ===\n", + "The answer is 405.\n", + "The answer is 405.\n" + ] + } + ], + "source": [ + "response = agent.chat(\"What is (121 * 3) + 42?\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ToolOutput(content='363', tool_name='multiply', raw_input={'args': (), 'kwargs': {'a': 121, 'b': 3}}, raw_output=363, is_error=False), ToolOutput(content='405', tool_name='add', raw_input={'args': (), 'kwargs': {'a': 363, 'b': 42}}, raw_output=405, is_error=False)]\n" + ] + } + ], + "source": [ + "# inspect sources\n", + "print(response.sources)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Async Chat" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: What is 121 * 3?\n", + "=== Calling Function 
===\n", + "Calling function: multiply with args: {\"a\": 121, \"b\": 3}\n", + "=== Function Output ===\n", + "363\n", + "=== LLM Response ===\n", + "The answer is 363.\n", + "The answer is 363.\n" + ] + } + ], + "source": [ + "response = await agent.achat(\"What is 121 * 3?\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Agent with Personality" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can specify a system prompt to give the agent additional instruction or personality." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.prompts.system import SHAKESPEARE_WRITING_ASSISTANT" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "agent = FunctionCallingAgent.from_tools(\n", + " [multiply_tool, add_tool],\n", + " llm=llm,\n", + " verbose=True,\n", + " system_prompt=SHAKESPEARE_WRITING_ASSISTANT,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: Hi\n", + "Fair greeting unto thee, kind sir or madam! 'Tis a pleasure to make thy acquaintance. How doth thy day fare? Doth thou seek inspiration for a tale, a poem, or perhaps a song, penned in the grand style of the Bard himself?\n" + ] + } + ], + "source": [ + "response = agent.chat(\"Hi\")\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = agent.chat(\"Tell me a story\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# NVIDIA Agent with RAG/Query Engine Tools" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!mkdir -p 'data/10k/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/uber_2021.pdf' -O 'data/10k/uber_2021.pdf'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", + "from llama_index.core import SimpleDirectoryReader, VectorStoreIndex\n", + "\n", + "embed_model = NVIDIAEmbedding(model=\"NV-Embed-QA\", truncate=\"END\")\n", + "\n", + "# load data\n", + "uber_docs = SimpleDirectoryReader(\n", + " input_files=[\"./data/10k/uber_2021.pdf\"]\n", + ").load_data()\n", + "# build index\n", + "uber_index = VectorStoreIndex.from_documents(\n", + " uber_docs, embed_model=embed_model\n", + ")\n", + "uber_engine = uber_index.as_query_engine(similarity_top_k=3, llm=llm)\n", + "query_engine_tool = QueryEngineTool(\n", + " query_engine=uber_engine,\n", + " metadata=ToolMetadata(\n", + " name=\"uber_10k\",\n", + " description=(\n", + " \"Provides information about Uber financials for year 2021. 
\"\n", + " \"Use a detailed plain text question as input to the tool.\"\n", + " ),\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "agent = FunctionCallingAgent.from_tools(\n", + " [query_engine_tool], llm=llm, verbose=True\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = agent.chat(\n", + " \"Tell me both the risk factors and tailwinds for Uber? Do two parallel tool calls.\",\n", + " allow_parallel_tool_calls=True,\n", + ")\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# ReAct Agent " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.agent import ReActAgent" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "agent = ReActAgent.from_tools([multiply_tool, add_tool], llm=llm, verbose=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> Running step a61e9980-2b9c-4a78-9950-cabe13827f73. Step input: What is 20+(2*4)? Calculate step by step \n", + "\u001b[1;3;38;5;200mThought: To calculate 20+(2*4), I need to follow the order of operations (PEMDAS). First, I need to calculate the multiplication part, which is 2*4. I will use the multiply tool to do this.\n", + "Action: multiply\n", + "Action Input: {'a': 2, 'b': 4}\n", + "\u001b[0m\u001b[1;3;34mObservation: 8\n", + "\u001b[0m> Running step 73418308-49cc-4689-bb39-b83d6a7cf7ac. Step input: None\n", + "\u001b[1;3;38;5;200mThought: Now that I have the result of the multiplication, which is 8, I can proceed to add 20 to it. I will use the add tool to do this.\n", + "Action: add\n", + "Action Input: {'a': 20, 'b': 8}\n", + "\u001b[0m\u001b[1;3;34mObservation: 28\n", + "\u001b[0m> Running step 73464ecd-b266-47ef-8f1b-c0aa0e43ad60. Step input: None\n", + "\u001b[1;3;38;5;200mThought: I have now calculated the entire expression 20+(2*4) and have the final result.\n", + "Answer: 28\n", + "\u001b[0m" + ] + } + ], + "source": [ + "response = agent.chat(\"What is 20+(2*4)? Calculate step by step \")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> Running step 079f0d12-e0f7-48cd-a1be-9f8a9ce98b9c. Step input: What is 20+2*4? Calculate step by step\n", + "\u001b[1;3;38;5;200mThought: To calculate 20+2*4, I need to follow the order of operations (PEMDAS). First, I need to calculate the multiplication.\n", + "Action: multiply\n", + "Action Input: {'a': 2, 'b': 4}\n", + "\u001b[0m\u001b[1;3;34mObservation: 8\n", + "\u001b[0m> Running step 9d3a64e8-0b14-4721-9d5c-dd51b03ff3fa. Step input: None\n", + "\u001b[1;3;38;5;200mThought: Now that I have the result of the multiplication, I can add 20 to it.\n", + "Action: add\n", + "Action Input: {'a': 20, 'b': 8}\n", + "\u001b[0m\u001b[1;3;34mObservation: 28\n", + "\u001b[0m> Running step 14d6f623-5c92-405b-88d5-468805429e0b. Step input: None\n", + " 20+2*4 = 28" + ] + } + ], + "source": [ + "response_gen = agent.stream_chat(\"What is 20+2*4? 
Calculate step by step\")\n", + "response_gen.print_response_stream()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## View Prompts\n", + "\n", + "Let's take a look at the core system prompt powering the ReAct agent! \n", + "\n", + "Within the agent, the current conversation history is dumped below this line." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "prompt_dict = agent.get_prompts()\n", + "for k, v in prompt_dict.items():\n", + " print(f\"Prompt: {k}\\n\\nValue: {v.template}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Customizing the Prompt\n", + "\n", + "For fun, let's try instructing the agent to output the answer along with reasoning in bullet points. See \"## Additional Rules\" section." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import PromptTemplate\n", + "\n", + "react_system_header_str = \"\"\"\\\n", + "\n", + "You are designed to help with a variety of tasks, from answering questions \\\n", + " to providing summaries to other types of analyses.\n", + "\n", + "## Tools\n", + "You have access to a wide variety of tools. You are responsible for using\n", + "the tools in any sequence you deem appropriate to complete the task at hand.\n", + "This may require breaking the task into subtasks and using different tools\n", + "to complete each subtask.\n", + "\n", + "You have access to the following tools:\n", + "{tool_desc}\n", + "\n", + "## Output Format\n", + "To answer the question, please use the following format.\n", + "\n", + "```\n", + "Thought: I need to use a tool to help me answer the question.\n", + "Action: tool name (one of {tool_names}) if using a tool.\n", + "Action Input: the input to the tool, in a JSON format representing the kwargs (e.g. {{\"input\": \"hello world\", \"num_beams\": 5}})\n", + "```\n", + "\n", + "Please ALWAYS start with a Thought.\n", + "\n", + "Please use a valid JSON format for the Action Input. Do NOT do this {{'input': 'hello world', 'num_beams': 5}}.\n", + "\n", + "If this format is used, the user will respond in the following format:\n", + "\n", + "```\n", + "Observation: tool response\n", + "```\n", + "\n", + "You should keep repeating the above format until you have enough information\n", + "to answer the question without using any more tools. At that point, you MUST respond\n", + "in the one of the following two formats:\n", + "\n", + "```\n", + "Thought: I can answer without using any more tools.\n", + "Answer: [your answer here]\n", + "```\n", + "\n", + "```\n", + "Thought: I cannot answer the question with the provided tools.\n", + "Answer: Sorry, I cannot answer your query.\n", + "```\n", + "\n", + "## Additional Rules\n", + "- The answer MUST contain a sequence of bullet points that explain how you arrived at the answer. This can include aspects of the previous conversation history.\n", + "- You MUST obey the function signature of each tool. 
Do NOT pass in no arguments if the function expects arguments.\n", + "\n", + "## Current Conversation\n", + "Below is the current conversation consisting of interleaving human and assistant messages.\n", + "\n", + "\"\"\"\n", + "react_system_prompt = PromptTemplate(react_system_header_str)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'agent_worker:system_prompt': PromptTemplate(metadata={'prompt_type': }, template_vars=['tool_desc', 'tool_names'], kwargs={}, output_parser=None, template_var_mappings=None, function_mappings=None, template='You are designed to help with a variety of tasks, from answering questions to providing summaries to other types of analyses.\\n\\n## Tools\\n\\nYou have access to a wide variety of tools. You are responsible for using the tools in any sequence you deem appropriate to complete the task at hand.\\nThis may require breaking the task into subtasks and using different tools to complete each subtask.\\n\\nYou have access to the following tools:\\n{tool_desc}\\n\\n\\n## Output Format\\n\\nPlease answer in the same language as the question and use the following format:\\n\\n```\\nThought: The current language of the user is: (user\\'s language). I need to use a tool to help me answer the question.\\nAction: tool name (one of {tool_names}) if using a tool.\\nAction Input: the input to the tool, in a JSON format representing the kwargs (e.g. {{\"input\": \"hello world\", \"num_beams\": 5}})\\n```\\n\\nPlease ALWAYS start with a Thought.\\n\\nNEVER surround your response with markdown code markers. You may use code markers within your response if you need to.\\n\\nPlease use a valid JSON format for the Action Input. Do NOT do this {{\\'input\\': \\'hello world\\', \\'num_beams\\': 5}}.\\n\\nIf this format is used, the user will respond in the following format:\\n\\n```\\nObservation: tool response\\n```\\n\\nYou should keep repeating the above format till you have enough information to answer the question without using any more tools. At that point, you MUST respond in the one of the following two formats:\\n\\n```\\nThought: I can answer without using any more tools. I\\'ll use the user\\'s language to answer\\nAnswer: [your answer here (In the same language as the user\\'s question)]\\n```\\n\\n```\\nThought: I cannot answer the question with the provided tools.\\nAnswer: [your answer here (In the same language as the user\\'s question)]\\n```\\n\\n## Current Conversation\\n\\nBelow is the current conversation consisting of interleaving human and assistant messages.\\n')}" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "agent.get_prompts()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "agent.update_prompts({\"agent_worker:system_prompt\": react_system_prompt})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> Running step 0e470036-3748-454b-8ae7-f61f1ab54fc1. Step input: What is 5+3+2\n", + "\u001b[1;3;38;5;200mThought: I need to use a tool to help me answer the question.\n", + "Action: add\n", + "Action Input: {'a': 5, 'b': 3}\n", + "\u001b[0m\u001b[1;3;34mObservation: 8\n", + "\u001b[0m> Running step f383f54d-8647-4382-a8c7-402d82c1a795. 
Step input: None\n", + "\u001b[1;3;38;5;200mThought: I need to use another tool to help me answer the question.\n", + "Action: add\n", + "Action Input: {'a': 8, 'b': 2}\n", + "\u001b[0m\u001b[1;3;34mObservation: 10\n", + "\u001b[0m> Running step 2ef5d129-810d-489d-b1e1-4c21fd30553f. Step input: None\n", + "\u001b[1;3;38;5;200mThought: I can answer without using any more tools.\n", + "Answer: 10\n", + "\n", + "* The problem asked for the sum of 5, 3, and 2.\n", + "* I used the add tool to first calculate 5 + 3 = 8.\n", + "* Then, I used the add tool again to calculate 8 + 2 = 10.\n", + "* Therefore, the final answer is 10.\n", + "\u001b[0m10\n", + "\n", + "* The problem asked for the sum of 5, 3, and 2.\n", + "* I used the add tool to first calculate 5 + 3 = 8.\n", + "* Then, I used the add tool again to calculate 8 + 2 = 10.\n", + "* Therefore, the final answer is 10.\n" + ] + } + ], + "source": [ + "agent.reset()\n", + "response = agent.chat(\"What is 5+3+2\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using the CoAAgentWorker\n", + "\n", + "By installing the CoAAgentPack, you also get access to the underlying agent worker. With this, you can set up the agent manually, as well as customize the prompts and output parsing." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.agent.coa import CoAAgentWorker\n", + "\n", + "worker = CoAAgentWorker.from_tools(\n", + " tools=[query_engine_tool],\n", + " llm=llm,\n", + " verbose=True,\n", + ")\n", + "\n", + "agent = worker.as_agent()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "==== Available Parsed Functions ====\n", + "def uber_10k(input: string):\n", + " \"\"\"Provides information about Uber financials for year 2021. Use a detailed plain text question as input to the tool.\"\"\"\n", + " ...\n", + "==== Generated Chain of Abstraction ====\n", + "Here is the abstract plan of reasoning:\n", + "\n", + "To answer this question, we need to understand Uber's revenue growth in 2021. We can use the [FUNC uber_10k(\"What was Uber's revenue growth in 2021?\") = y1] to get the relevant information. Then, we can compare this growth to Uber's overall performance in 2021 by analyzing the output y1.\n", + "\n", + "Note: Since the question is asking for a comparison, the final answer will require a manual analysis of the output y1, rather than a simple function call.\n", + "==== Executing uber_10k with inputs [\"What was Uber's revenue growth in 2021?\"] ====\n" + ] + }, + { + "data": { + "text/plain": [ + "AgentChatResponse(response='Based on the previous reasoning, we can analyze the output \"Uber\\'s revenue grew by 57% in 2021, increasing from $11,139 million in 2020 to $17,455 million in 2021.\" to answer the question. Since the question is asking for a comparison, we can infer that the revenue growth of Uber in 2021 was significant, with a 57% increase from the previous year. 
This suggests that Uber\\'s revenue growth in 2021 was strong, indicating a positive trend for the company.\\n\\nTherefore, the response to the question is:\\n\\nUber\\'s revenue growth in 2021 was strong, with a 57% increase from the previous year, indicating a positive trend for the company.', sources=[], source_nodes=[], is_dummy_stream=False, metadata=None)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "agent.chat(\"How did Uber's revenue growth compare to Uber in 2021?\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/examples/agent/return_direct_agent.ipynb b/docs/docs/examples/agent/return_direct_agent.ipynb index 2ac3dd8684ec0..be7069e29bbaf 100644 --- a/docs/docs/examples/agent/return_direct_agent.ipynb +++ b/docs/docs/examples/agent/return_direct_agent.ipynb @@ -138,7 +138,7 @@ "source": [ "from llama_index.llms.anthropic import Anthropic\n", "from llama_index.core.llms import ChatMessage\n", - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", "llm = Anthropic(model=\"claude-3-sonnet-20240229\", temperature=0.1)\n", "\n", @@ -154,7 +154,7 @@ " )\n", "]\n", "\n", - "worker = FunctionCallingAgentWorker(\n", + "agent = FunctionCallingAgent.from_tools(\n", " tools=[\n", " get_booking_state_tool,\n", " update_booking_tool,\n", @@ -166,9 +166,7 @@ " max_function_calls=10,\n", " allow_parallel_tool_calls=False,\n", " verbose=True,\n", - ")\n", - "\n", - "agent = worker.as_agent()" + ")" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_best.ipynb b/docs/docs/examples/chat_engine/chat_engine_best.ipynb index 01463a4306254..9e81b3c32d8a3 100644 --- a/docs/docs/examples/chat_engine/chat_engine_best.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_best.ipynb @@ -5,7 +5,7 @@ "id": "d89ae951", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_condense_plus_context.ipynb b/docs/docs/examples/chat_engine/chat_engine_condense_plus_context.ipynb index b9402cfa2de4d..7678c4ef65059 100644 --- a/docs/docs/examples/chat_engine/chat_engine_condense_plus_context.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_condense_plus_context.ipynb @@ -5,11 +5,10 @@ "id": "616a781c", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { - "attachments": {}, "cell_type": "markdown", "id": "18e20fbc-056b-44ac-b1fc-2d34b8e99bcc", "metadata": {}, @@ -19,7 +18,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "b99eea02-429c-40e4-99be-b82a89c8d070", "metadata": {}, @@ -28,7 +26,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "34d34fcc-e247-4d55-ab16-c3d633e2385a", "metadata": {}, @@ -40,7 +37,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "f1c3cbc6-98a8-4e0e-98eb-3c7fa09ba79f", "metadata": {}, @@ -77,7 +73,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "79db0610", "metadata": {}, @@ -97,7 +92,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "b314f279-bf7f-4e67-9f66-ebf783f08d38", "metadata": {}, @@ -106,7 +100,6 
] }, { - "attachments": {}, "cell_type": "markdown", "id": "40d3d9e4", "metadata": {}, @@ -145,7 +138,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "e58d7ad9-d246-477e-acac-894ad5402f24", "metadata": {}, @@ -182,7 +174,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "63a4259d-89b5-49f8-b158-9eba5353d6f5", "metadata": {}, @@ -219,7 +210,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "67021e64-8665-4338-9fb4-c0f1d6361092", "metadata": {}, @@ -264,7 +254,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "c2c68de8-af58-4f7e-8759-19fc072873fd", "metadata": {}, @@ -311,7 +300,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "a65ad1a2", "metadata": {}, diff --git a/docs/docs/examples/chat_engine/chat_engine_condense_question.ipynb b/docs/docs/examples/chat_engine/chat_engine_condense_question.ipynb index 08d1c6cdb554c..414631bc8aa31 100644 --- a/docs/docs/examples/chat_engine/chat_engine_condense_question.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_condense_question.ipynb @@ -5,7 +5,7 @@ "id": "dae1c2c1", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_context.ipynb b/docs/docs/examples/chat_engine/chat_engine_context.ipynb index 6749ba1447e9a..11a25716b7986 100644 --- a/docs/docs/examples/chat_engine/chat_engine_context.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_context.ipynb @@ -5,7 +5,7 @@ "id": "616a781c", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_openai.ipynb b/docs/docs/examples/chat_engine/chat_engine_openai.ipynb index 4303cde620fe2..4b3efffb4250f 100644 --- a/docs/docs/examples/chat_engine/chat_engine_openai.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_openai.ipynb @@ -5,7 +5,7 @@ "id": "23e5dc2d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_personality.ipynb b/docs/docs/examples/chat_engine/chat_engine_personality.ipynb index 1ed6f254deb77..5248d893d881e 100644 --- a/docs/docs/examples/chat_engine/chat_engine_personality.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_personality.ipynb @@ -5,7 +5,7 @@ "id": "5a19abd0", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_react.ipynb b/docs/docs/examples/chat_engine/chat_engine_react.ipynb index ddd8f9e44e296..8d8d6e74c0f8b 100644 --- a/docs/docs/examples/chat_engine/chat_engine_react.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_react.ipynb @@ -5,7 +5,7 @@ "id": "beda3039", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/chat_engine/chat_engine_repl.ipynb b/docs/docs/examples/chat_engine/chat_engine_repl.ipynb index 581265d9dbbac..aa85e0c84e8f6 100644 --- a/docs/docs/examples/chat_engine/chat_engine_repl.ipynb +++ b/docs/docs/examples/chat_engine/chat_engine_repl.ipynb @@ -5,7 +5,7 @@ "id": "bdb508b5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/citation/pdf_page_reference.ipynb b/docs/docs/examples/citation/pdf_page_reference.ipynb index 7aad4bae9fb52..2ecff553a4215 100644 --- a/docs/docs/examples/citation/pdf_page_reference.ipynb +++ b/docs/docs/examples/citation/pdf_page_reference.ipynb @@ -17,7 +17,7 @@ "id": "18ffbd8c", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/cookbooks/codestral.ipynb 
b/docs/docs/examples/cookbooks/codestral.ipynb index 8a53a165a8adc..f3e7cf24dd802 100644 --- a/docs/docs/examples/cookbooks/codestral.ipynb +++ b/docs/docs/examples/cookbooks/codestral.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/cookbooks/cohere_retriever_eval.ipynb b/docs/docs/examples/cookbooks/cohere_retriever_eval.ipynb index b7fef176f6018..27899185bfe62 100644 --- a/docs/docs/examples/cookbooks/cohere_retriever_eval.ipynb +++ b/docs/docs/examples/cookbooks/cohere_retriever_eval.ipynb @@ -6,7 +6,7 @@ "id": "8a5706df", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/cookbooks/mistralai.ipynb b/docs/docs/examples/cookbooks/mistralai.ipynb index 8d55c1d727378..d615670af5a6b 100644 --- a/docs/docs/examples/cookbooks/mistralai.ipynb +++ b/docs/docs/examples/cookbooks/mistralai.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -218,7 +218,7 @@ "outputs": [], "source": [ "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", - "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgent\n", "\n", "query_engine_tools = [\n", " QueryEngineTool(\n", @@ -237,13 +237,12 @@ " ),\n", "]\n", "\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " query_engine_tools,\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=False,\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { @@ -378,7 +377,7 @@ "source": [ "from llama_index.core.tools import FunctionTool\n", "from llama_index.core.agent import (\n", - " FunctionCallingAgentWorker,\n", + " FunctionCallingAgent,\n", " ReActAgent,\n", ")" ] @@ -423,13 +422,12 @@ "metadata": {}, "outputs": [], "source": [ - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [multiply_tool, add_tool, subtract_tool],\n", " llm=llm,\n", " verbose=True,\n", " allow_parallel_tool_calls=False,\n", - ")\n", - "agent = agent_worker.as_agent()" + ")" ] }, { diff --git a/docs/docs/examples/cookbooks/mixedbread_reranker.ipynb b/docs/docs/examples/cookbooks/mixedbread_reranker.ipynb index 786d236ed4b63..ce365e1612b1a 100644 --- a/docs/docs/examples/cookbooks/mixedbread_reranker.ipynb +++ b/docs/docs/examples/cookbooks/mixedbread_reranker.ipynb @@ -5,7 +5,7 @@ "id": "964030f7-40e4-4398-a5ab-668aabcf3bad", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-2/Components_Of_LlamaIndex.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-2/Components_Of_LlamaIndex.ipynb new file mode 100644 index 0000000000000..e0648ed5bad0f --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-2/Components_Of_LlamaIndex.ipynb @@ -0,0 +1,1179 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Components Of LlamaIndex\n", + "\n", + "In this notebook we will demonstrate building a RAG application and customizing it using different components of LlamaIndex.\n", + "\n", + "1. Question Answering.\n", + "2. Summarization.\n", + "3. ChatEngine.\n", + "4. Customizing QA System.\n", + "5. 
Index as Retriever.\n", + "\n", + "[ChatEngine Documentation](https://docs.llamaindex.ai/en/stable/module_guides/deploying/chat_engines/usage_pattern/)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index\n", + "# !pip install llama-index-llms-openai\n", + "# !pip install llama-index-embeddings-openai" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup API Key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-04-28 04:34:09-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 75042 (73K) [text/plain]\n", + "Saving to: ‘paul_graham_essay.txt’\n", + "\n", + "\r", + "paul_graham_essay.t 0%[ ] 0 --.-KB/s \r", + "paul_graham_essay.t 100%[===================>] 73.28K --.-KB/s in 0.01s \n", + "\n", + "2024-04-28 04:34:09 (5.45 MB/s) - ‘paul_graham_essay.txt’ saved [75042/75042]\n" + ] + } + ], + "source": [ + "!wget \"https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\" -O \"paul_graham_essay.txt\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex, SimpleDirectoryReader\n", + "\n", + "documents = SimpleDirectoryReader(\n", + " input_files=[\"paul_graham_essay.txt\"]\n", + ").load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set LLM and Embedding Model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core import Settings\n", + "\n", + "llm = OpenAI(model=\"gpt-3.5-turbo\", temperature=0.2)\n", + "embed_model = OpenAIEmbedding()\n", + "\n", + "Settings.llm = llm\n", + "Settings.embed_model = embed_model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create Nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.node_parser import TokenTextSplitter\n", + "\n", + "splitter = TokenTextSplitter(chunk_size=1024, chunk_overlap=20)\n", + "nodes = splitter.get_nodes_from_documents(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "TextNode(id_='957f1fe7-c30c-4ad4-a11d-5abd8df895e0', embedding=None, metadata={'file_path': 'paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-28', 'last_modified_date': '2024-04-28'}, excluded_embed_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], excluded_llm_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], relationships={: RelatedNodeInfo(node_id='bad72a51-4dbd-408a-8049-d86b67cd2077', node_type=, metadata={'file_path': 'paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-28', 'last_modified_date': '2024-04-28'}, hash='6faa426d61b507e88eb0c1e1e16b50d553ae2a88d0ea43b14f7b0323e893b6c9'), : RelatedNodeInfo(node_id='d453a07b-ef1a-4e2b-90b2-10d2f2a078c4', node_type=, metadata={}, hash='2fbfe91b14338e335b5524f5f35bfd2af93fc8b3e2eb6fe28a7d939160f80b7b')}, text='What I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. 
The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. 
I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach myself. Which meant learning Lisp, since in', start_char_idx=2, end_char_idx=4517, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create Index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex\n", + "\n", + "index = VectorStoreIndex(nodes) # VectorStoreIndex.from_documents(documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create QueryEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_engine = index.as_query_engine(similarity_top_k=5)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = query_engine.query(\"What did Paul Graham do growing up?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Growing up, Paul Graham wrote short stories and dabbled in programming. He started with an IBM 1401 in 9th grade, using an early version of Fortran. 
Transitioning to microcomputers like the TRS-80, he wrote simple games, a rocket prediction program, and a word processor that his father used to write a book.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "5\n" + ] + } + ], + "source": [ + "print(len(response.source_nodes))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "NodeWithScore(node=TextNode(id_='9ba45177-a374-48d4-87b5-9beb6e32bb6d', embedding=None, metadata={'file_path': 'paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-28', 'last_modified_date': '2024-04-28'}, excluded_embed_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], excluded_llm_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], relationships={: RelatedNodeInfo(node_id='bad72a51-4dbd-408a-8049-d86b67cd2077', node_type=, metadata={'file_path': 'paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-28', 'last_modified_date': '2024-04-28'}, hash='6faa426d61b507e88eb0c1e1e16b50d553ae2a88d0ea43b14f7b0323e893b6c9'), : RelatedNodeInfo(node_id='4ceb5356-5b8f-4768-a639-deb04dee9f61', node_type=, metadata={'file_path': 'paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-28', 'last_modified_date': '2024-04-28'}, hash='ab58b78109f33f28b8667dd98727ece3316d066718546915091c769337d75f89'), : RelatedNodeInfo(node_id='09007719-f392-4539-ad7d-644b3389db08', node_type=, metadata={}, hash='7446bc458fbcf2e3e7ade3067e4f2c62efd00d275e9b004fe1862fc225ee2dfa')}, text='I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he\\'d made a lot of money the last time I\\'d lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\\n\\nHmph. Well, I\\'d do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it\\'s now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn\\'t so naive as to assume I could spring an overt Lisp on a general audience; we\\'d hide the parentheses, like Dylan did.\\n\\nBy then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. This name didn\\'t last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\\n\\nI started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn\\'t want to run a company — especially not a big one, which it was looking like this would have to be. 
I\\'d only started Viaweb because I needed the money. Now that I didn\\'t need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I\\'d build a subset that could be done as an open source project.\\n\\nMuch to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\\n\\nThe subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn\\'t even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\\n\\nThe following spring, lightning struck. I was invited to give a talk at a Lisp conference, so I gave one about how we\\'d used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I\\'d created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\\n\\nWow, I thought, there\\'s an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\\n\\nI\\'ve worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I\\'d always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. 
Socially they\\'d seem more like rants', start_char_idx=40381, end_char_idx=44930, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), score=0.8332831239533887)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "response.source_nodes[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summarization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The document chronicles the author's journey through different endeavors, such as writing, programming, exploring artificial intelligence, Lisp programming, art, and painting still life.\n" + ] + } + ], + "source": [ + "from llama_index.core import SummaryIndex\n", + "\n", + "summary_index = SummaryIndex(nodes)\n", + "\n", + "query_engine = summary_index.as_query_engine()\n", + "\n", + "summary = query_engine.query(\"Provide the summary of the document.\")\n", + "print(summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ChatEngines" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Simple ChatEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.chat_engine import SimpleChatEngine\n", + "\n", + "chat_engine = SimpleChatEngine.from_defaults()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "AgentChatResponse(response='Hello! How can I assist you today?', sources=[], source_nodes=[], is_dummy_stream=False)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "response = chat_engine.chat(\"Hello\")\n", + "response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "AgentChatResponse(response='Steve Jobs was born on February 24, 1955, and grew up in California. As a child, he showed an early interest in electronics and technology. He attended Reed College but dropped out after just one semester. Jobs then went on to work at Atari before co-founding Apple Inc. in 1976 with Steve Wozniak. Throughout his career, Jobs was known for his innovative ideas, design aesthetics, and visionary leadership in the tech industry.', sources=[], source_nodes=[], is_dummy_stream=False)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "response = chat_engine.chat(\"What did steve jobs do growing up?\")\n", + "response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "AgentChatResponse(response='Yes, Steve Jobs did visit India in the 1970s. After dropping out of college, Jobs traveled to India in search of spiritual enlightenment. He spent several months in India, visiting various ashrams and seeking guidance from spiritual teachers. This experience had a profound impact on Jobs and influenced his personal philosophy and approach to life and work. 
Jobs later credited his time in India as a significant influence on his thinking and creativity.', sources=[], source_nodes=[], is_dummy_stream=False)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "response = chat_engine.chat(\"And did he visit India?\")\n", + "response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "===== Entering Chat REPL =====\n", + "Type \"exit\" to exit.\n", + "\n", + "Human: hi\n", + "Assistant: Hello! How can I assist you today?\n", + "\n", + "Human: who is steve jobs\n", + "Assistant: Steve Jobs was an American entrepreneur, business magnate, and inventor who co-founded Apple Inc. He is widely regarded as a pioneer of the personal computer revolution and played a key role in the development of iconic products such as the iPhone, iPad, and Macintosh computer. Jobs was known for his visionary leadership, innovative design aesthetic, and relentless pursuit of excellence. He passed away in 2011, but his legacy continues to inspire and influence the tech industry and beyond.\n", + "\n", + "Human: exit\n" + ] + } + ], + "source": [ + "chat_engine.chat_repl()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### CondenseQuestion ChatEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chat_engine = index.as_chat_engine(chat_mode=\"condense_question\", verbose=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Querying with: What did Paul Graham do after YC?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What did Paul Graham do after YC?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham started working on a new version of Arc with Robert in the summer of 2006. This version of Arc was compiled into Scheme, and to test it, Paul wrote Hacker News. Initially meant to be a news aggregator for startup founders, it was later renamed Hacker News with a broader topic to engage intellectual curiosity. 
Paul gradually stopped working on Arc as he focused more on writing essays and working on Y Combinator.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Querying with: What did Paul Graham do after working on Arc, Hacker News, and writing essays for Y Combinator?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What about after that?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham continued to work on Y Combinator after working on Arc, Hacker News, and writing essays.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Querying with: Can you tell me more about what Paul Graham did after working on Arc, Hacker News, and writing essays?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"Can you tell me more?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham continued his journey by collaborating with Jessica Livingston to start their own investment firm after realizing the need to implement their ideas without waiting for venture capitalists to make decisions.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Context ChatEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.memory import ChatMemoryBuffer\n", + "\n", + "memory = ChatMemoryBuffer.from_defaults(token_limit=3900)\n", + "\n", + "chat_engine = index.as_chat_engine(\n", + " chat_mode=\"context\",\n", + " memory=memory,\n", + " system_prompt=(\n", + " \"You are a chatbot, able to have normal interactions, as well as talk\"\n", + " \" about an essay discussing Paul Graham's life.\"\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hello! How are you doing today?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"Hello\")\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "After Y Combinator (YC), Paul Graham continued to be involved in various projects. He worked on a new version of Arc in the summer of 2006 and created Hacker News, which became a popular platform for discussions on various topics. Additionally, he wrote essays and continued to be engaged in the startup community. 
His work after YC involved a mix of writing, software development, and other entrepreneurial endeavors.\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What did Paul Graham do after YC?\")\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "After his time at Y Combinator and his various projects, Paul Graham continued to write essays, share his insights on startups and technology, and engage with the entrepreneurial community. He also co-founded the venture capital firm Y Combinator, which has become one of the most successful startup accelerators in the world. Graham's influence and contributions to the tech industry have continued to be significant, and he remains a respected figure in the startup ecosystem.\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What about after that?\")\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Certainly! If you're interested in learning more about Paul Graham's life and his contributions to the tech industry, I can share insights from an essay that discusses various aspects of his journey. Would you like me to provide some excerpts from the essay for more detailed information?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"Can you tell me more?\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### CondenseContext ChatEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.memory import ChatMemoryBuffer\n", + "\n", + "memory = ChatMemoryBuffer.from_defaults(token_limit=3900)\n", + "\n", + "chat_engine = index.as_chat_engine(\n", + " chat_mode=\"condense_plus_context\",\n", + " memory=memory,\n", + " llm=llm,\n", + " context_prompt=(\n", + " \"You are a chatbot, able to have normal interactions, as well as talk\"\n", + " \" about an essay discussing Paul Graham's life. \"\n", + " \"Here are the relevant documents for the context:\\n\"\n", + " \"{context_str}\"\n", + " \"\\nInstruction: Use the previous chat history, or the context above, to interact and help the user.\"\n", + " ),\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Condensed question: Hello\n", + "Context: file_path: paul_graham_essay.txt\n", + "\n", + "then I thought perhaps it would be interesting to other people, and encouraging to those with similarly messy lives. So I wrote a more detailed version for others to read, and this is the last sentence of it.\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Notes\n", + "\n", + "[1] My experience skipped a step in the evolution of computers: time-sharing machines with interactive OSes. I went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting.\n", + "\n", + "[2] Italian words for abstract concepts can nearly always be predicted from their English cognates (except for occasional traps like polluzione). It's the everyday words that differ. 
So if you string together a lot of abstract concepts with a few simple verbs, you can make a little Italian go a long way.\n", + "\n", + "[3] I lived at Piazza San Felice 4, so my walk to the Accademia went straight down the spine of old Florence: past the Pitti, across the bridge, past Orsanmichele, between the Duomo and the Baptistery, and then up Via Ricasoli to Piazza San Marco. I saw Florence at street level in every possible condition, from empty dark winter evenings to sweltering summer days when the streets were packed with tourists.\n", + "\n", + "[4] You can of course paint people like still lives if you want to, and they're willing. That sort of portrait is arguably the apex of still life painting, though the long sitting does tend to produce pained expressions in the sitters.\n", + "\n", + "[5] Interleaf was one of many companies that had smart people and built impressive technology, and yet got crushed by Moore's Law. In the 1990s the exponential growth in the power of commodity (i.e. Intel) processors rolled up high-end, special-purpose hardware and software companies like a bulldozer.\n", + "\n", + "[6] The signature style seekers at RISD weren't specifically mercenary. In the art world, money and coolness are tightly coupled. Anything expensive comes to be seen as cool, and anything seen as cool will soon become equally expensive.\n", + "\n", + "[7] Technically the apartment wasn't rent-controlled but rent-stabilized, but this is a refinement only New Yorkers would know or care about. The point is that it was really cheap, less than half market price.\n", + "\n", + "[8] Most software you can launch as soon as it's done. But when the software is an online store builder and you're hosting the stores, if you don't have any users yet, that fact will be painfully obvious. So before we could launch publicly we had to launch privately, in the sense of recruiting an initial set of users and making sure they had decent-looking stores.\n", + "\n", + "[9] We'd had a code editor in Viaweb for users to define their own page styles. They didn't know it, but they were editing Lisp expressions underneath. But this wasn't an app editor, because the code ran when the merchants' sites were generated, not when shoppers visited them.\n", + "\n", + "[10] This was the first instance of what is now a familiar experience, and so was what happened next, when I read the comments and found they were full of angry people. How could I claim that Lisp was better than other languages? Weren't they all Turing complete? People who see the responses to essays I write sometimes tell me how sorry they feel for me, but I'm not exaggerating when I reply that it has always been like this, since the very beginning. It comes with the territory. An essay must tell readers things they don't already know, and some people dislike being told such things.\n", + "\n", + "[11] People put plenty of stuff on the internet in the 90s of course, but putting something online is not the same as publishing it online. Publishing online means you treat the online version as the (or at least a) primary version.\n", + "\n", + "[12] There is a general lesson here that our experience with Y Combinator also teaches: Customs continue to constrain you long after the restrictions that caused them have disappeared. Customary VC practice had once, like the customs about publishing essays, been based on real constraints. Startups had once been much more expensive to start, and proportionally rare. 
Now they could be cheap and common, but the VCs' customs still reflected the old world, just as customs about writing essays still reflected the constraints of the print era.\n", + "\n", + "Which in turn implies that people who are independent-minded (i.e. less influenced by custom) will have an advantage in fields affected by rapid change (where customs are more likely to be obsolete).\n", + "\n", + "Here's an interesting point, though: you can't always predict which fields will be affected by rapid change. Obviously software and venture capital will be, but who would have predicted that essay writing would be?\n", + "\n", + "[13] Y Combinator was not the original name. At first we were called Cambridge Seed. But we didn't want a regional name, in case someone copied us in Silicon Valley, so we renamed ourselves after one of the coolest\n", + "\n", + "file_path: paul_graham_essay.txt\n", + "\n", + "that online essays would be a marginal medium at first. Socially they'd seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\n", + "\n", + "One of the most conspicuous patterns I've noticed in my life is how well it has worked, for me at least, to work on things that weren't prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I'm writing, and I explain that it's an essay I'm going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\n", + "\n", + "It's not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it's a sign both that there's something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren't prestigious doesn't guarantee you're on the right track, it at least guarantees you're not on the most common type of wrong one.\n", + "\n", + "Over the next several years I wrote lots of essays about all kinds of different topics. O'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. I used to have dinners for a group of friends every thursday night, which taught me how to cook for groups. And I bought another building in Cambridge, a former candy factory (and later, twas said, porn studio), to use as an office.\n", + "\n", + "One night in October 2003 there was a big party at my house. It was a clever idea of my friend Maria Daniels, who was one of the thursday diners. Three separate hosts would all invite their friends to one party. So for every guest, two thirds of the other guests would be people they didn't know but would probably like. One of the guests was someone I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days later I asked her out.\n", + "\n", + "Jessica was in charge of marketing at a Boston investment bank. 
This bank thought it understood startups, but over the next year, as she met friends of mine from the startup world, she was surprised how different reality was. And how colorful their stories were. So she decided to compile a book of interviews with startup founders.\n", + "\n", + "When the bank had financial problems and she had to fire half her staff, she started looking for a new job. In early 2005 she interviewed for a marketing job at a Boston VC firm. It took them weeks to make up their minds, and during this time I started telling her about all the things that needed to be fixed about venture capital. They should make a larger number of smaller investments instead of a handful of giant ones, they should be funding younger, more technical founders instead of MBAs, they should let the founders remain as CEO, and so on.\n", + "\n", + "One of my tricks for writing essays had always been to give talks. The prospect of having to stand up in front of a group of people and tell them something that won't waste their time is a great spur to the imagination. When the Harvard Computer Society, the undergrad computer club, asked me to give a talk, I decided I would tell them how to start a startup. Maybe they'd be able to avoid the worst of the mistakes we'd made.\n", + "\n", + "So I gave this talk, in the course of which I told them that the best sources of seed funding were successful startup founders, because then they'd be sources of advice too. Whereupon it seemed they were all looking expectantly at me. Horrified at the prospect of having my inbox flooded by business plans (if I'd only known), I blurted out \"But not me!\" and went on with the talk. But afterward it occurred to me that I should really stop procrastinating about angel investing. I'd been meaning to since Yahoo bought us, and now it was 7 years later and I still hadn't done one angel investment.\n", + "\n", + "Meanwhile I had been scheming with Robert and Trevor about projects we could work on together. I missed working with them, and it seemed like there had to be something we could collaborate on.\n", + "\n", + "As Jessica and I were walking home from dinner on March 11, at the corner of Garden and Walker streets, these three threads converged. Screw the VCs who were taking so long to make up their minds. We'd start our own investment firm and actually implement the ideas\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"Hello\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hello! How are you today?\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Condensed question: What did Paul Graham do after Y Combinator?\n", + "Context: file_path: paul_graham_essay.txt\n", + "\n", + "so long to make up their minds. We'd start our own investment firm and actually implement the ideas we'd been talking about. I'd fund it, and Jessica could quit her job and work for it, and we'd get Robert and Trevor as partners too. [13]\n", + "\n", + "Once again, ignorance worked in our favor. We had no idea how to be angel investors, and in Boston in 2005 there were no Ron Conways to learn from. 
So we just made what seemed like the obvious choices, and some of the things we did turned out to be novel.\n", + "\n", + "There are multiple components to Y Combinator, and we didn't figure them all out at once. The part we got first was to be an angel firm. In those days, those two words didn't go together. There were VC firms, which were organized companies with people whose job it was to make investments, but they only did big, million dollar investments. And there were angels, who did smaller investments, but these were individuals who were usually focused on other things and made investments on the side. And neither of them helped founders enough in the beginning. We knew how helpless founders were in some respects, because we remembered how helpless we'd been. For example, one thing Julian had done for us that seemed to us like magic was to get us set up as a company. We were fine writing fairly difficult software, but actually getting incorporated, with bylaws and stock and all that stuff, how on earth did you do that? Our plan was not only to make seed investments, but to do for startups everything Julian had done for us.\n", + "\n", + "YC was not organized as a fund. It was cheap enough to run that we funded it with our own money. That went right by 99% of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" But once again, this was not due to any particular insight on our part. We didn't know how VC firms were organized. It never occurred to us to try to raise a fund, and if it had, we wouldn't have known where to start. [14]\n", + "\n", + "The most distinctive thing about YC is the batch model: to fund a bunch of startups all at once, twice a year, and then to spend three months focusing intensively on trying to help them. That part we discovered by accident, not merely implicitly but explicitly due to our ignorance about investing. We needed to get experience as investors. What better way, we thought, than to fund a whole bunch of startups at once? We knew undergrads got temporary jobs at tech companies during the summer. Why not organize a summer program where they'd start startups instead? We wouldn't feel guilty for being in a sense fake investors, because they would in a similar sense be fake founders. So while we probably wouldn't make much money out of it, we'd at least get to practice being investors on them, and they for their part would probably have a more interesting summer than they would working at Microsoft.\n", + "\n", + "We'd use the building I owned in Cambridge as our headquarters. We'd all have dinner there once a week — on tuesdays, since I was already cooking for the thursday diners on thursdays — and after dinner we'd bring in experts on startups to give talks.\n", + "\n", + "We knew undergrads were deciding then about summer jobs, so in a matter of days we cooked up something we called the Summer Founders Program, and I posted an announcement on my site, inviting undergrads to apply. I had never imagined that writing essays would be a way to get \"deal flow,\" as investors call it, but it turned out to be the perfect source. [15] We got 225 applications for the Summer Founders Program, and we were surprised to find that a lot of them were from people who'd already graduated, or were about to that spring. Already this SFP thing was starting to feel more serious than we'd intended.\n", + "\n", + "We invited about 20 of the 225 groups to interview in person, and from those we picked 8 to fund. 
They were an impressive group. That first batch included reddit, Justin Kan and Emmett Shear, who went on to found Twitch, Aaron Swartz, who had already helped write the RSS spec and would a few years later become a martyr for open access, and Sam Altman, who would later become the second president of YC. I don't think it was entirely luck that the first batch was so good. You had to be pretty bold to sign up for a weird thing like the Summer Founders Program instead of a summer job at a legit place like Microsoft or Goldman Sachs.\n", + "\n", + "The deal for startups was based on a combination of the deal we did with Julian ($10k for 10%) and what Robert said MIT grad students got for the summer ($6k). We invested $6k per founder, which in the typical two-founder case was $12k, in return for 6%. That had\n", + "\n", + "file_path: paul_graham_essay.txt\n", + "\n", + "which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\n", + "\n", + "Fairly quickly I realized that we had stumbled upon the way to scale startup funding. Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\n", + "\n", + "As YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another's customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\n", + "\n", + "I had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\n", + "\n", + "In the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn't startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one's intellectual curiosity.\n", + "\n", + "HN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I'd had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one's work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. 
When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\n", + "\n", + "As well as HN, I wrote all of YC's internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn't have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\n", + "\n", + "YC was different from other kinds of work I've done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn't have picked a better way to do it.\n", + "\n", + "There were parts of the job I didn't like. Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn't like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I'd better work very hard.\n", + "\n", + "One day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What did Paul Graham do after YC?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "After Y Combinator (YC), Paul Graham continued to focus on writing essays and working on various projects. He also worked on a new version of Arc, a programming language he created, and developed Hacker News, a news aggregator platform. Additionally, he remained involved in the startup community and continued to offer advice and insights through his writings and public appearances.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Condensed question: What did Paul Graham do after his involvement with Y Combinator, writing essays, working on projects, developing Arc, and creating Hacker News?\n", + "Context: file_path: paul_graham_essay.txt\n", + "\n", + "which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\n", + "\n", + "Fairly quickly I realized that we had stumbled upon the way to scale startup funding. 
Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\n", + "\n", + "As YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another's customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\n", + "\n", + "I had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\n", + "\n", + "In the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn't startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one's intellectual curiosity.\n", + "\n", + "HN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I'd had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one's work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\n", + "\n", + "As well as HN, I wrote all of YC's internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn't have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\n", + "\n", + "YC was different from other kinds of work I've done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn't have picked a better way to do it.\n", + "\n", + "There were parts of the job I didn't like. Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. 
But I worked hard even at the parts I didn't like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I'd better work very hard.\n", + "\n", + "One day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember\n", + "\n", + "file_path: paul_graham_essay.txt\n", + "\n", + "I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he'd made a lot of money the last time I'd lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\n", + "\n", + "Hmph. Well, I'd do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it's now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn't so naive as to assume I could spring an overt Lisp on a general audience; we'd hide the parentheses, like Dylan did.\n", + "\n", + "By then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. This name didn't last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\n", + "\n", + "I started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn't want to run a company — especially not a big one, which it was looking like this would have to be. I'd only started Viaweb because I needed the money. Now that I didn't need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I'd build a subset that could be done as an open source project.\n", + "\n", + "Much to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\n", + "\n", + "The subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn't even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\n", + "\n", + "The following spring, lightning struck. 
I was invited to give a talk at a Lisp conference, so I gave one about how we'd used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I'd created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\n", + "\n", + "Wow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\n", + "\n", + "This had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\n", + "\n", + "In the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\n", + "\n", + "I've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\n", + "\n", + "I knew that online essays would be a marginal medium at first. Socially they'd seem more like rants\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What about after that?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "After his involvement with Y Combinator and working on projects like Arc and Hacker News, Paul Graham continued to write essays and share his thoughts on various topics. He also remained active in the tech and startup community, offering advice and insights based on his experiences. Additionally, he continued to explore new ideas and projects, always looking for ways to contribute to the world of technology and entrepreneurship.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Condensed question: Tell me more about Paul Graham's activities after his involvement with Y Combinator and his projects like Arc and Hacker News.\n", + "Context: file_path: paul_graham_essay.txt\n", + "\n", + "which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\n", + "\n", + "Fairly quickly I realized that we had stumbled upon the way to scale startup funding. 
Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\n", + "\n", + "As YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another's customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\n", + "\n", + "I had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\n", + "\n", + "In the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn't startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one's intellectual curiosity.\n", + "\n", + "HN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I'd had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one's work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\n", + "\n", + "As well as HN, I wrote all of YC's internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn't have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\n", + "\n", + "YC was different from other kinds of work I've done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn't have picked a better way to do it.\n", + "\n", + "There were parts of the job I didn't like. Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. 
But I worked hard even at the parts I didn't like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I'd better work very hard.\n", + "\n", + "One day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember\n", + "\n", + "file_path: paul_graham_essay.txt\n", + "\n", + "I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he'd made a lot of money the last time I'd lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\n", + "\n", + "Hmph. Well, I'd do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it's now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn't so naive as to assume I could spring an overt Lisp on a general audience; we'd hide the parentheses, like Dylan did.\n", + "\n", + "By then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. This name didn't last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\n", + "\n", + "I started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn't want to run a company — especially not a big one, which it was looking like this would have to be. I'd only started Viaweb because I needed the money. Now that I didn't need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I'd build a subset that could be done as an open source project.\n", + "\n", + "Much to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\n", + "\n", + "The subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn't even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\n", + "\n", + "The following spring, lightning struck. 
I was invited to give a talk at a Lisp conference, so I gave one about how we'd used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I'd created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\n", + "\n", + "Wow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\n", + "\n", + "This had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\n", + "\n", + "In the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\n", + "\n", + "I've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\n", + "\n", + "I knew that online essays would be a marginal medium at first. Socially they'd seem more like rants\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"Can you tell me more?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Certainly! Paul Graham is known for his contributions to the tech industry, particularly through his work with Y Combinator, a startup accelerator he co-founded. He has written numerous essays on a wide range of topics, including startups, programming, and entrepreneurship. Graham's insights and advice have been valuable to many aspiring entrepreneurs and developers.\n", + "\n", + "In addition to his writing and work with startups, Graham has also been involved in creating programming languages like Arc and developing platforms such as Hacker News. 
His passion for technology and innovation is evident in his various projects and endeavors over the years.\n", + "\n", + "Overall, Paul Graham's impact on the tech community extends beyond his time at Y Combinator, as he continues to inspire and educate through his writings and contributions to the field of technology and entrepreneurship.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Customizing RAG Pipeline" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.retrievers import VectorIndexRetriever\n", + "from llama_index.core.response_synthesizers import get_response_synthesizer\n", + "from llama_index.core.query_engine import RetrieverQueryEngine\n", + "from llama_index.core.node_parser import TokenTextSplitter  # imported here so the cell is self-contained\n", + "\n", + "# split the documents into nodes\n", + "splitter = TokenTextSplitter(chunk_size=1024, chunk_overlap=20)\n", + "nodes = splitter.get_nodes_from_documents(documents)\n", + "\n", + "# construct an index over the nodes\n", + "index = VectorStoreIndex(nodes)\n", + "\n", + "# configure retriever\n", + "retriever = VectorIndexRetriever(index=index, similarity_top_k=3)\n", + "\n", + "# configure response synthesizer\n", + "synthesizer = get_response_synthesizer(response_mode=\"refine\")\n", + "\n", + "# construct query engine\n", + "query_engine = RetrieverQueryEngine(\n", + " retriever=retriever,\n", + " response_synthesizer=synthesizer,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = query_engine.query(\"What did Paul Graham do growing up?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham grew up to become a successful entrepreneur and programmer.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Index as Retriever" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "retriever = index.as_retriever(similarity_top_k=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "retrieved_nodes = retriever.retrieve(\"What did Paul Graham do growing up?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "**Node ID:** 83fb80c2-eef6-4f32-b632-f1ed4034cf15
**Similarity:** 0.8332996078161921
**Text:** I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he'd made a lot of money the last time I'd lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\n", + "\n", + "Hmph. Well, I'd do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wan...
" ], + "text/plain": [ + "<IPython.core.display.Markdown object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**Node ID:** 4a367ce5-0b22-4377-9341-fc9285d065a0
**Similarity:** 0.8268013589103509
**Text:** CMU, where he was in grad school. One day I went to visit the Carnegie Institute, where I'd spent a lot of time as a kid. While looking at a painting there I realized something that might seem obvious, but was a big surprise to me. There, right on the wall, was something you could make that would last. Paintings didn't become obsolete. Some of the best ones were hundreds of years old.\n", + "\n", + "And moreover this was something you could make a living doing. Not as easily as you could by writing softwar...
" ], + "text/plain": [ + "<IPython.core.display.Markdown object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**Node ID:** 223787cc-0e88-40bd-94b7-b7003fcaffea
**Similarity:** 0.8230760740448807
**Text:** paint, and I could do that for free. So in 1993 I dropped out. I hung around Providence for a bit, and then my college friend Nancy Parmet did me a big favor. A rent-controlled apartment in a building her mother owned in New York was becoming vacant. Did I want it? It wasn't much more than my current place, and New York was supposed to be where the artists were. So yes, I wanted it! [7]\n", + "\n", + "Asterix comics begin by zooming in on a tiny corner of Roman Gaul that turns out not to be controlled by t...
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from llama_index.core.response.notebook_utils import display_source_node\n", + "\n", + "for text_node in retrieved_nodes:\n", + " display_source_node(text_node, source_length=500)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-3/Evaluating_RAG_Systems.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-3/Evaluating_RAG_Systems.ipynb new file mode 100644 index 0000000000000..c5ded6ceb8b0e --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-3/Evaluating_RAG_Systems.ipynb @@ -0,0 +1,6136 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Evaluating RAG Systems\n", + "\n", + "Evaluation and benchmarking are crucial in developing LLM applications. Optimizing performance for applications like RAG (Retrieval Augmented Generation) requires a robust measurement mechanism.\n", + "\n", + "LlamaIndex provides essential modules to assess the quality of generated outputs and evaluate content retrieval quality. It categorizes its evaluation into two main types:\n", + "\n", + "* **Response Evaluation** : Assesses quality of Generated Outputs\n", + "* **Retrieval Evaluation** : Assesses Retrieval quality\n", + "\n", + "[Documentation\n", + "](https://docs.llamaindex.ai/en/latest/module_guides/evaluating/)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Response Evaluation\n", + "\n", + "Evaluating results from LLMs is distinct from traditional machine learning's straightforward outcomes. LlamaIndex employs evaluation modules, using a benchmark LLM like GPT-4, to gauge answer accuracy. Notably, these modules often blend query, context, and response, minimizing the need for ground-truth labels.\n", + "\n", + "The evaluation modules manifest in the following categories:\n", + "\n", + "* **Faithfulness:** Assesses whether the response remains true to the retrieved contexts, ensuring there's no distortion or \"hallucination.\"\n", + "* **Relevancy:** Evaluates the relevance of both the retrieved context and the generated answer to the initial query.\n", + "* **Correctness:** Determines if the generated answer aligns with the reference answer based on the query (this does require labels).\n", + "\n", + "Furthermore, LlamaIndex has the capability to autonomously generate questions from your data, paving the way for an evaluation pipeline to assess the RAG application." 
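+ "\n", + "As a rough sketch of how these evaluators fit together (a hypothetical example, not the exact cells below: it assumes a `query_engine` built over your documents and an `OPENAI_API_KEY` are already in place, and the `query` and `reference` strings are purely illustrative):\n", + "\n", + "```python\n", + "from llama_index.core.evaluation import (\n", + "    FaithfulnessEvaluator,\n", + "    RelevancyEvaluator,\n", + "    CorrectnessEvaluator,\n", + ")\n", + "from llama_index.llms.openai import OpenAI\n", + "\n", + "# benchmark LLM that acts as the judge\n", + "judge_llm = OpenAI(model=\"gpt-4\", temperature=0)\n", + "\n", + "query = \"What did the author work on before college?\"\n", + "response = query_engine.query(query)  # query_engine is assumed to exist\n", + "\n", + "# faithfulness and relevancy need no ground-truth labels\n", + "faithfulness = FaithfulnessEvaluator(llm=judge_llm).evaluate_response(response=response)\n", + "relevancy = RelevancyEvaluator(llm=judge_llm).evaluate_response(query=query, response=response)\n", + "\n", + "# correctness compares against a reference answer, so it does need a label\n", + "correctness = CorrectnessEvaluator(llm=judge_llm).evaluate(\n", + "    query=query,\n", + "    response=str(response),\n", + "    reference=\"Writing short stories and programming.\",\n", + ")\n", + "\n", + "print(faithfulness.passing, relevancy.passing, correctness.score)\n", + "```\n", + "\n", + "Each call returns an `EvaluationResult` whose `passing`, `score`, and `feedback` fields can be aggregated over a generated question set to benchmark the whole pipeline."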
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# attach to the same event-loop\n", + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()\n", + "\n", + "import logging\n", + "import sys\n", + "\n", + "# Set up the root logger\n", + "logger = logging.getLogger()\n", + "logger.setLevel(logging.INFO) # Set logger level to INFO\n", + "\n", + "# Clear out any existing handlers\n", + "logger.handlers = []\n", + "\n", + "# Set up the StreamHandler to output to sys.stdout (Colab's output)\n", + "handler = logging.StreamHandler(sys.stdout)\n", + "handler.setLevel(logging.INFO) # Set handler level to INFO\n", + "\n", + "# Add the handler to the logger\n", + "logger.addHandler(handler)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "import sys\n", + "import pandas as pd\n", + "\n", + "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n", + "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n", + "\n", + "from llama_index.core.evaluation import (\n", + " DatasetGenerator,\n", + " FaithfulnessEvaluator,\n", + " RelevancyEvaluator,\n", + " CorrectnessEvaluator,\n", + " RetrieverEvaluator,\n", + " generate_question_context_pairs,\n", + ")\n", + "\n", + "from llama_index.core import (\n", + " SimpleDirectoryReader,\n", + " VectorStoreIndex,\n", + " Response,\n", + ")\n", + "\n", + "from llama_index.llms.openai import OpenAI\n", + "\n", + "import os" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-05-14 13:32:55-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 75042 (73K) [text/plain]\n", + "Saving to: ‘data/paul_graham/paul_graham_essay.txt’\n", + "\n", + "\r", + " data/paul 0%[ ] 0 --.-KB/s \r", + "data/paul_graham/pa 100%[===================>] 73.28K --.-KB/s in 0.01s \n", + "\n", + "2024-05-14 13:32:55 (5.19 MB/s) - ‘data/paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir -p 'data/paul_graham/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "reader = SimpleDirectoryReader(\"./data/paul_graham/\")\n", + "documents = reader.load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Generate Question" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6ebf35b78ab24b71a30ade0d76117ab9", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Parsing nodes: 0%| | 0/1 [00:00What I Worked On\n", + "\n", + "February 2021\n", + "\n", + "Before college the two main things I worked on, outside of school, were writing and programming. I didn't write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\n", + "\n", + "The first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district's 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain's lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\n", + "\n", + "The language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\n", + "\n", + "I was puzzled by the 1401. I couldn't figure out what to do with it. And in retrospect there's not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn't have any data stored on punched cards. The only other option was to do things that didn't rely on any input, like calculate approximations of pi, but I didn't know enough math to do anything interesting of that type. So I'm not surprised I can't remember any programs I wrote, because they can't have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn't. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager's expression made clear.\n", + "\n", + "With microcomputers, everything changed. 
Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\n", + "\n", + "The first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\n", + "\n", + "Computers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he'd write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\n", + "\n", + "Though I liked programming, I didn't plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn't much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\n", + "\n", + "I couldn't have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\n", + "\n", + "AI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven't tried rereading The Moon is a Harsh Mistress, so I don't know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we'd have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most.
" ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from IPython.display import display, HTML\n", + "\n", + "display(HTML(f'{nodes[1].get_text()}'))"
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Faithfulness Evaluator\n", + "\n", + "Measures if the response from a query engine matches any source nodes. This is useful for measuring if the response was hallucinated." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "faithfulness_evaluator = FaithfulnessEvaluator(llm=gpt4)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Generate response\n", + "response_vector = query_engine.query(eval_query)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "eval_result = faithfulness_evaluator.evaluate_response(\n", + " response=response_vector\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eval_result.passing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "EvaluationResult(query=None, contexts=[\"Now that I could write essays again, I wrote a bunch about topics I'd had stacked up. I kept writing essays through 2020, but I also started to think about other things I could work on. How should I choose what to do? Well, how had I chosen what to work on in the past? I wrote an essay for myself to answer that question, and I was surprised how long and messy the answer turned out to be. If this surprised me, who'd lived it, then I thought perhaps it would be interesting to other people, and encouraging to those with similarly messy lives. So I wrote a more detailed version for others to read, and this is the last sentence of it.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nNotes\\n\\n[1] My experience skipped a step in the evolution of computers: time-sharing machines with interactive OSes. I went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting.\\n\\n[2] Italian words for abstract concepts can nearly always be predicted from their English cognates (except for occasional traps like polluzione). It's the everyday words that differ. So if you string together a lot of abstract concepts with a few simple verbs, you can make a little Italian go a long way.\\n\\n[3] I lived at Piazza San Felice 4, so my walk to the Accademia went straight down the spine of old Florence: past the Pitti, across the bridge, past Orsanmichele, between the Duomo and the Baptistery, and then up Via Ricasoli to Piazza San Marco. 
I saw Florence at street level in every possible condition, from empty dark winter evenings to sweltering summer days when the streets were packed with tourists.\\n\\n[4] You can of course paint people like still lives if you want to, and they're willing. That sort of portrait is arguably the apex of still life painting, though the long sitting does tend to produce pained expressions in the sitters.\\n\\n[5] Interleaf was one of many companies that had smart people and built impressive technology, and yet got crushed by Moore's Law. In the 1990s the exponential growth in the power of commodity (i.e. Intel) processors rolled up high-end, special-purpose hardware and software companies like a bulldozer.\\n\\n[6] The signature style seekers at RISD weren't specifically mercenary. In the art world, money and coolness are tightly coupled. Anything expensive comes to be seen as cool, and anything seen as cool will soon become equally expensive.\\n\\n[7] Technically the apartment wasn't rent-controlled but rent-stabilized, but this is a refinement only New Yorkers would know or care about. The point is that it was really cheap, less than half market price.\\n\\n[8] Most software you can launch as soon as it's done. But when the software is an online store builder and you're hosting the stores, if you don't have any users yet, that fact will be painfully obvious. So before we could launch publicly we had to launch privately, in the sense of recruiting an initial set of users and making sure they had decent-looking stores.\\n\\n[9] We'd had a code editor in Viaweb for users to define their own page styles. They didn't know it, but they were editing Lisp expressions underneath. But this wasn't an app editor, because the code ran when the merchants' sites were generated, not when shoppers visited them.\\n\\n[10] This was the first instance of what is now a familiar experience, and so was what happened next, when I read the comments and found they were full of angry people. How could I claim that Lisp was better than other languages? Weren't they all Turing complete? People who see the responses to essays I write sometimes tell me how sorry they feel for me, but I'm not exaggerating when I reply that it has always been like this, since the very beginning. It comes with the territory. An essay must tell readers things they don't already know, and some people dislike being told such things.\\n\\n[11] People put plenty of stuff on the internet in the 90s of course, but putting something online is not the same as publishing it online. Publishing online means you treat the online version as the (or at least a) primary version.\\n\\n[12] There is a general lesson here that our experience with Y Combinator also teaches: Customs continue to constrain you long after the restrictions that caused them have disappeared. Customary VC practice had once, like the customs about publishing essays, been based on real constraints. Startups had once been much more expensive to start, and proportionally rare.\", 'What I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. 
They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. 
So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most.', 'I didn\\'t want to drop out of grad school, but how else was I going to get out? I remember when my friend Robert Morris got kicked out of Cornell for writing the internet worm of 1988, I was envious that he\\'d found such a spectacular way to get out of grad school.\\n\\nThen one day in April 1990 a crack appeared in the wall. I ran into professor Cheatham and he asked if I was far enough along to graduate that June. I didn\\'t have a word of my dissertation written, but in what must have been the quickest bit of thinking in my life, I decided to take a shot at writing one in the 5 weeks or so that remained before the deadline, reusing parts of On Lisp where I could, and I was able to respond, with no perceptible delay \"Yes, I think so. I\\'ll give you something to read in a few days.\"\\n\\nI picked applications of continuations as the topic. In retrospect I should have written about macros and embedded languages. There\\'s a whole world there that\\'s barely been explored. But all I wanted was to get out of grad school, and my rapidly written dissertation sufficed, just barely.\\n\\nMeanwhile I was applying to art schools. I applied to two: RISD in the US, and the Accademia di Belli Arti in Florence, which, because it was the oldest art school, I imagined would be good. RISD accepted me, and I never heard back from the Accademia, so off to Providence I went.\\n\\nI\\'d applied for the BFA program at RISD, which meant in effect that I had to go to college again. This was not as strange as it sounds, because I was only 25, and art schools are full of people of different ages. RISD counted me as a transfer sophomore and said I had to do the foundation that summer. The foundation means the classes that everyone has to take in fundamental subjects like drawing, color, and design.\\n\\nToward the end of the summer I got a big surprise: a letter from the Accademia, which had been delayed because they\\'d sent it to Cambridge England instead of Cambridge Massachusetts, inviting me to take the entrance exam in Florence that fall. This was now only weeks away. My nice landlady let me leave my stuff in her attic. I had some money saved from consulting work I\\'d done in grad school; there was probably enough to last a year if I lived cheaply. Now all I had to do was learn Italian.\\n\\nOnly stranieri (foreigners) had to take this entrance exam. In retrospect it may well have been a way of excluding them, because there were so many stranieri attracted by the idea of studying art in Florence that the Italian students would otherwise have been outnumbered. I was in decent shape at painting and drawing from the RISD foundation that summer, but I still don\\'t know how I managed to pass the written exam. I remember that I answered the essay question by writing about Cezanne, and that I cranked up the intellectual level as high as I could to make the most of my limited vocabulary. 
[2]\\n\\nI\\'m only up to age 25 and already there are such conspicuous patterns. Here I was, yet again about to attend some august institution in the hopes of learning about some prestigious subject, and yet again about to be disappointed. The students and faculty in the painting department at the Accademia were the nicest people you could imagine, but they had long since arrived at an arrangement whereby the students wouldn\\'t require the faculty to teach anything, and in return the faculty wouldn\\'t require the students to learn anything. And at the same time all involved would adhere outwardly to the conventions of a 19th century atelier. We actually had one of those little stoves, fed with kindling, that you see in 19th century studio paintings, and a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they\\'d seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She\\'d copy an obscure old painting out of a book, and then he\\'d take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time.'], response='The author described their early attempts at writing short stories as lacking in plot, focusing mainly on characters with strong feelings, which the author believed made the stories deep.', passing=True, feedback='YES', score=1.0, pairwise_source=None, invalid_result=False, invalid_reason=None)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eval_result" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Relevancy Evaluator\n", + "\n", + "Measures if the response + source nodes match the query." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create RelevancyEvaluator using GPT-4 LLM\n", + "relevancy_evaluator = RelevancyEvaluator(llm=gpt4)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "# Generate response\n", + "response_vector = query_engine.query(eval_query)\n", + "\n", + "# Evaluation\n", + "eval_result = relevancy_evaluator.evaluate_response(\n", + " query=eval_query, response=response_vector\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'How did the author describe their early attempts at writing short stories?'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eval_result.query" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'The author described their early attempts at writing short stories as being focused more on characters with strong feelings rather than having a strong plot.'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eval_result.response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + 
"execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eval_result.passing" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Relevancy evaluation with multiple source nodes." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "# Create Query Engine with 
similarity_top_k=3\n", + "query_engine = vector_index.as_query_engine(similarity_top_k=3)\n", + "\n", + "# Create response\n", + "response_vector = query_engine.query(eval_query)\n", + "\n", + "# Evaluate with each source node\n", + "eval_source_result_full = [\n", + " relevancy_evaluator.evaluate(\n", + " query=eval_query,\n", + " response=response_vector.response,\n", + " contexts=[source_node.get_content()],\n", + " )\n", + " for source_node in response_vector.source_nodes\n", + "]\n", + "\n", + "# Evaluation result\n", + "eval_source_result = [\n", + " \"Pass\" if result.passing else \"Fail\" for result in eval_source_result_full\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Fail', 'Pass', 'Fail']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "eval_source_result" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Correctness Evaluator\n", + "\n", + "Evaluates the relevance and correctness of a generated answer against a reference answer." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "correctness_evaluator = CorrectnessEvaluator(llm=gpt4)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query = \"Can you explain the theory of relativity proposed by Albert Einstein in detail?\"\n", + "\n", + "reference = \"\"\"\n", + "Certainly! Albert Einstein's theory of relativity consists of two main components: special relativity and general relativity. Special relativity, published in 1905, introduced the concept that the laws of physics are the same for all non-accelerating observers and that the speed of light in a vacuum is a constant, regardless of the motion of the source or observer. It also gave rise to the famous equation E=mc², which relates energy (E) and mass (m).\n", + "\n", + "General relativity, published in 1915, extended these ideas to include the effects of gravity. According to general relativity, gravity is not a force between masses, as described by Newton's theory of gravity, but rather the result of the warping of space and time by mass and energy. Massive objects, such as planets and stars, cause a curvature in spacetime, and smaller objects follow curved paths in response to this curvature. This concept is often illustrated using the analogy of a heavy ball placed on a rubber sheet, causing it to create a depression that other objects (representing smaller masses) naturally move towards.\n", + "\n", + "In essence, general relativity provided a new understanding of gravity, explaining phenomena like the bending of light by gravity (gravitational lensing) and the precession of the orbit of Mercury. It has been confirmed through numerous experiments and observations and has become a fundamental theory in modern physics.\n", + "\"\"\"\n", + "\n", + "response = \"\"\"\n", + "Certainly! Albert Einstein's theory of relativity consists of two main components: special relativity and general relativity. Special relativity, published in 1905, introduced the concept that the laws of physics are the same for all non-accelerating observers and that the speed of light in a vacuum is a constant, regardless of the motion of the source or observer. 
It also gave rise to the famous equation E=mc², which relates energy (E) and mass (m).\n", + "\n", + "However, general relativity, published in 1915, extended these ideas to include the effects of magnetism. According to general relativity, gravity is not a force between masses but rather the result of the warping of space and time by magnetic fields generated by massive objects. Massive objects, such as planets and stars, create magnetic fields that cause a curvature in spacetime, and smaller objects follow curved paths in response to this magnetic curvature. This concept is often illustrated using the analogy of a heavy ball placed on a rubber sheet with magnets underneath, causing it to create a depression that other objects (representing smaller masses) naturally move towards due to magnetic attraction.\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "correctness_result = correctness_evaluator.evaluate(\n", + " query=query,\n", + " response=response,\n", + " reference=reference,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "EvaluationResult(query='Can you explain the theory of relativity proposed by Albert Einstein in detail?', contexts=None, response=\"\\nCertainly! Albert Einstein's theory of relativity consists of two main components: special relativity and general relativity. Special relativity, published in 1905, introduced the concept that the laws of physics are the same for all non-accelerating observers and that the speed of light in a vacuum is a constant, regardless of the motion of the source or observer. It also gave rise to the famous equation E=mc², which relates energy (E) and mass (m).\\n\\nHowever, general relativity, published in 1915, extended these ideas to include the effects of magnetism. According to general relativity, gravity is not a force between masses but rather the result of the warping of space and time by magnetic fields generated by massive objects. Massive objects, such as planets and stars, create magnetic fields that cause a curvature in spacetime, and smaller objects follow curved paths in response to this magnetic curvature. This concept is often illustrated using the analogy of a heavy ball placed on a rubber sheet with magnets underneath, causing it to create a depression that other objects (representing smaller masses) naturally move towards due to magnetic attraction.\\n\", passing=False, feedback='The generated answer is relevant to the user query as it attempts to explain the theory of relativity proposed by Albert Einstein. However, it contains significant mistakes. 
The general relativity part of the answer incorrectly attributes the warping of space and time to magnetic fields, which is not accurate. General relativity is about the effects of gravity, not magnetism. The analogy used also incorrectly introduces magnets, which is misleading.', score=2.5, pairwise_source=None, invalid_result=False, invalid_reason=None)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "correctness_result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2.5" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "correctness_result.score" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "False" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "correctness_result.passing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'The generated answer is relevant to the user query as it attempts to explain the theory of relativity proposed by Albert Einstein. However, it contains significant mistakes. The general relativity part of the answer incorrectly attributes the warping of space and time to magnetic fields, which is not accurate. General relativity is about the effects of gravity, not magnetism. The analogy used also incorrectly introduces magnets, which is misleading.'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "correctness_result.feedback" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Retrieval Evaluation\n", + "\n", + "Evaluates the quality of any Retriever module defined in LlamaIndex.\n", + "\n", + "We use metrics like hit rate (the fraction of queries for which the ground-truth context appears among the top-k retrieved results) and MRR (Mean Reciprocal Rank: the average over queries of 1/rank of the first relevant result). Both compare retrieved results against the ground-truth context for each question. To make building the evaluation dataset easier, we use synthetic question generation." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "reader = SimpleDirectoryReader(\"./data/paul_graham/\")\n", + "documents = reader.load_data()\n", + "\n", + "from llama_index.core.text_splitter import SentenceSplitter\n", + "\n", + "# create parser and parse document into nodes\n", + "parser = SentenceSplitter(chunk_size=1024, chunk_overlap=100)\n", + "nodes = parser(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "vector_index = VectorStoreIndex(nodes)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Define the retriever\n", + "retriever = vector_index.as_retriever(similarity_top_k=2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "retrieved_nodes = retriever.retrieve(eval_query)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "**Node ID:** 6447ac8e-b79c-48e2-a725-a29b79eabc32
**Similarity:** 0.8353830045794844
**Text:** What I Worked On\n", + "\n", + "February 2021\n", + "\n", + "Before college the two main things I worked on, outside of school, were writing and programming. I didn't write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\n", + "\n", + "The first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district's 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain's lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\n", + "\n", + "The language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\n", + "\n", + "I was puzzled by the 1401. I couldn't figure out what to do with it. And in retrospect there's not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn't have any data stored on punched cards. The only other option was to do things that didn't rely on any input, like calculate approximations of pi, but I didn't know enough math to do anything interesting of that type. So I'm not surprised I can't remember any programs I wrote, because they can't have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn't. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager's expression made clear.\n", + "\n", + "With microcomputers, everything changed. Now you could h...
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**Node ID:** 0689f30f-4066-4d08-a87a-1e67d6f2abc6
**Similarity:** 0.8301979532393335
**Text:** In one day it got 30,000 page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\n", + "\n", + "Wow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\n", + "\n", + "This had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\n", + "\n", + "In the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. [12]\n", + "\n", + "I've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\n", + "\n", + "I knew that online essays would be a marginal medium at first. Socially they'd seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\n", + "\n", + "One of the most conspicuous patterns I've noticed in my life is how well it has worked, f...
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from llama_index.core.response.notebook_utils import display_source_node\n", + "\n", + "for node in retrieved_nodes:\n", + " display_source_node(node, source_length=2000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\r", + " 0%| | 0/19 [00:00\n", + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
retrievershit_ratemrr
0top-2 eval0.8947370.776316
\n", + "
\n", + "
\n", + "\n", + "
\n", + " \n", + "\n", + " \n", + "\n", + " \n", + "
\n", + "\n", + "
\n", + " \n" + ], + "text/plain": [ + " retrievers hit_rate mrr\n", + "0 top-2 eval 0.894737 0.776316" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "display_results(\"top-2 eval\", eval_results)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-4/Ingestion_Pipeline.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-4/Ingestion_Pipeline.ipynb new file mode 100644 index 0000000000000..0ae562047ab47 --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-4/Ingestion_Pipeline.ipynb @@ -0,0 +1,599 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Ingestion Pipeline\n", + "\n", + "In this notebook we will demonstrate usage of Ingestion Pipeline in building RAG applications.\n", + "\n", + "[Ingestion Pipeline](https://docs.llamaindex.ai/en/stable/module_guides/loading/ingestion_pipeline/)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index llama-index-vector-stores-qdrant" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set API Key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()\n", + "\n", + "import os\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-04-26 13:35:44-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.111.133, 185.199.108.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 75042 (73K) [text/plain]\n", + "Saving to: ‘data/paul_graham/paul_graham_essay.txt’\n", + "\n", + "data/paul_graham/pa 100%[===================>] 73.28K --.-KB/s in 0.009s \n", + "\n", + "2024-04-26 13:35:44 (8.36 MB/s) - ‘data/paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir -p 'data/paul_graham/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SimpleDirectoryReader\n", + "\n", + "documents = SimpleDirectoryReader(\"./data/paul_graham/\").load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Ingestion Pipeline - Apply Transformations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import Document\n", + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.core.node_parser import TokenTextSplitter\n", + "from llama_index.core.extractors import TitleExtractor\n", + "from llama_index.core.ingestion import IngestionPipeline, IngestionCache" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Text Splitters" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " ]\n", + ")\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "TextNode(id_='c6856f07-73bc-44ce-bd0b-5e27271f9f0f', embedding=None, metadata={'file_path': '/content/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-26', 'last_modified_date': '2024-04-26'}, excluded_embed_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], excluded_llm_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], relationships={: RelatedNodeInfo(node_id='244aec5e-98e0-48d1-81fd-9c12c2fe4c5c', node_type=, metadata={'file_path': '/content/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-26', 'last_modified_date': '2024-04-26'}, hash='952e9dc1a243648316292b0771f0f024a059072e500f7da0092671800767f543'), : RelatedNodeInfo(node_id='fe681b68-998e-4f5f-b113-2684fbaa543a', node_type=, metadata={}, hash='d3386e1e52a73d6920911fc30d0592648217874646641b9e7c64ef1c1f4cc82b')}, text='What I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. 
They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. 
So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach', start_char_idx=2, end_char_idx=4473, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Text Splitter + Metadata Extractor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 5/5 [00:01<00:00, 3.71it/s]\n" + ] + } + ], + "source": [ + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " ]\n", + ")\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'From Painting to Programming: A Journey through Writing, AI, and Fine Arts'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0].metadata[\"document_title\"]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Text Splitter + Metadata Extractor + OpenAI Embedding" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 5/5 [00:01<00:00, 4.31it/s]\n" + ] + } + ], + "source": [ + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " OpenAIEmbedding(),\n", + " ]\n", + ")\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'Journeys in Writing, Programming, and Art: Exploring the Evolution of Artificial Intelligence and the Intersection of Technology and Creativity'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0].metadata[\"document_title\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "TextNode(id_='0a6d8435-cc5c-4100-b12c-e22d175190c6', embedding=[0.004466439131647348, -0.01828564889729023, -0.007774787023663521, -0.02322954684495926, 0.005550032947212458, 0.034214481711387634, -0.02435377426445484, 
-0.005089505575597286, ... (remaining embedding values and the rest of the TextNode repr truncated for brevity)
0.012258157134056091, 0.007219444960355759, -0.026669956743717194, -0.0010040175402536988, 0.028065083548426628, -0.0029951215256005526, -0.013619421981275082, 0.022186586633324623, -0.009894567541778088, 0.02947375550866127, -0.021888598799705505, -0.004398714285343885, 0.00709754042327404, -0.004012683872133493, -0.038575947284698486, -0.028525611385703087, -0.018421098589897156, -0.029771745204925537, 0.00922409351915121, 0.003670674515888095, -0.027225298807024956, -0.015468304045498371, 0.029825923964381218, 0.0069620911963284016, -0.012901540845632553, -0.004242947790771723, 0.018840990960597992, 0.008167590014636517, -0.01021964568644762, 0.0007521666120737791, -0.0015644388040527701, -0.008865153416991234, -0.012691594660282135, 0.007950871251523495, 0.010104513727128506, 0.009251183830201626, 0.00792378094047308, -0.008871925994753838, 0.017445862293243408, -0.018637817353010178, 0.00315935374237597, -0.03191184252500534, -9.476156265009195e-05, 0.032399460673332214, -0.011452234350144863, -0.019613051787018776, -0.02113008312880993, 0.02520710416138172, -0.014601428993046284, -0.011350646615028381, 0.02885068953037262, 0.021847963333129883, -0.005445059854537249, -0.00246348325163126, -0.012000803835690022, 0.011066203936934471, -3.261892925365828e-05, 0.01976204477250576, 0.00013185138232074678, 0.01189921610057354, -0.01901707425713539, 0.03367268294095993, 0.0060274917632341385, -0.0029138519894331694, -0.008296266198158264, 0.008194679394364357, -0.0014543862780556083, -0.019680775701999664, -0.005089505575597286, -0.01573920249938965, -0.025396734476089478, -0.015156771056354046, -0.030015554279088974, -0.025586362928152084, 0.02618234045803547, 0.012650960125029087, 0.012650960125029087, 0.02522064931690693, -0.013118259608745575, -0.01450661476701498, -0.0023178751580417156, -0.015766292810440063, -0.01317243929952383, -0.03838631510734558, 0.01053795125335455, -0.019152523949742317, 0.003636812325567007, -0.005804000422358513, -0.01544121466577053, -0.00015290950250346214, -0.012021120637655258, -0.016159094870090485, 0.005238499492406845, -0.014777513220906258, 0.002856286009773612, 0.024109967052936554, 9.322717960458249e-05, 0.012684822082519531, -0.022728383541107178, 0.006234051659703255, 0.0274826530367136, 0.0040973396971821785, -0.0007572459289804101, 0.003035756293684244, 0.007842511869966984, -0.010341550223529339, 0.007212672382593155, -0.009129279293119907, -0.013138577342033386, -0.02906740829348564, 0.008025367744266987, -0.0016220047837123275, 0.008905787952244282, 0.01812310889363289, -0.019179614260792732, -0.027956724166870117, 0.020358022302389145, -0.013714236207306385, -0.021279076114296913, -0.006968863774091005, -0.013450110331177711, 0.017933480441570282, -0.01862427219748497, -0.009034465067088604, -0.0364629365503788, 0.007287169340997934, -0.022213676944375038, -0.0005900507676415145, 0.01720205508172512, -0.027685826644301414, 0.0008110023918561637, 0.004439349286258221, -0.008479123003780842, -0.011492868885397911, 0.013226618990302086, -0.007199127692729235, 0.008472350426018238, -0.0039923666045069695, -0.0040567051619291306, 0.010402502492070198, -0.012793181464076042, 0.008052458055317402, 0.004547708667814732, -0.034945905208587646, 0.02225431054830551, 0.025816626846790314, -0.0033015753142535686, -0.02726593427360058, -0.007693517487496138, 0.0133417509496212, 0.003839986165985465, -0.0014222171157598495, 0.020588286221027374, -0.00734134903177619, -0.016497718170285225, -0.01476396806538105, -0.02106235735118389, 0.006037650164216757, 
-0.05453186854720116, 0.028606880456209183, 0.015305764973163605, -0.016619622707366943, -0.01510259136557579, -0.02146870642900467, -0.17370010912418365, 0.01981622539460659, 0.015414124354720116, -0.02925703674554825, 0.03621912747621536, 0.017567766830325127, 0.017649037763476372, -0.002563376910984516, 0.0031237981747835875, -0.004788130987435579, 0.0010692025534808636, 0.0035995638463646173, -0.010883347131311893, -0.01664671301841736, -0.01479105744510889, 0.016213275492191315, -0.02339208498597145, 0.01947760209441185, 0.03088242933154106, -0.0015618990873917937, 0.007984733209013939, -0.030448991805315018, 0.0025295147206634283, -0.0016871896805241704, 0.02043929137289524, 0.016145549714565277, -0.002195970853790641, 0.0031745918095111847, -0.013030217960476875, -0.02646678313612938, 0.00726685207337141, 0.015495394356548786, 0.011953395791351795, -0.0035216803662478924, 0.05512784421443939, -0.014465979300439358, 0.003154274309054017, -0.02072373405098915, -0.036598388105630875, 0.01624036394059658, 0.03323924541473389, 0.025369644165039062, 0.03605658933520317, -0.02236267179250717, -0.018543001264333725, 0.0033997760619968176, 0.02792963571846485, -0.03004264272749424, 0.0005765058449469507, -0.00925795640796423, -0.003370993072167039, -0.031830571591854095, -0.004700088873505592, -0.008391081355512142, 0.002686974359676242, 0.005844634957611561, -0.005665164906531572, 0.031370047479867935, -0.004520618822425604, 0.009244411252439022, -0.011926306411623955, -0.014046086929738522, 0.01535994466394186, 0.013605876825749874, -0.0005502625717781484, -0.026101069524884224, -0.020967543125152588, -0.00208761147223413, -0.031315866857767105, 0.01782512106001377, -0.008269176818430424, -0.02822762355208397, 0.0026852814480662346, -0.0020588284824043512, -0.0020385112147778273, 0.010578586719930172, -0.022051136940717697, 0.02061537466943264, -0.00925795640796423, 0.0019860246684402227, -0.006200189236551523, 0.023486899212002754, -0.00875002145767212, -0.0038873935118317604, 0.002060521626845002, -0.007212672382593155, 0.010653083212673664, -0.00015915287076495588, -0.0011360805947333574, -0.0256134532392025, 0.01078853290528059, -0.005526329390704632, -0.015698567032814026, -0.007822194136679173, -0.0032135334331542253, 0.013558469712734222, 0.008018595166504383, 0.0035961775574833155, -0.0020943840499967337, -0.021726058796048164, 0.02367652766406536, -0.007903463207185268, 0.004371624439954758, -0.006237437948584557, 0.04350629821419716, 0.014344075694680214, -0.014696243219077587, 0.016484173014760017, 0.050116222351789474, -0.0013350216904655099, -0.03174930438399315, 0.015427669510245323, 0.004070249851793051, 0.02662932127714157, 0.0038670760113745928, 0.01908479817211628, 0.016660258173942566, -0.031668033450841904, 0.012508737854659557, -0.016226820647716522, 0.033022526651620865, 0.0017676126444712281, -0.011120383627712727, 0.018217923119664192, -0.015915285795927048, -0.019003529101610184, -0.11626963317394257, -0.014046086929738522, -0.003252475056797266, 0.010273825377225876, -0.0075038885697722435, 0.027902545407414436, -0.026656411588191986, 0.03391649201512337, -0.029202857986092567, 0.034675005823373795, -0.007977960631251335, -0.042043447494506836, 0.012102390639483929, -0.00354199786670506, -0.002629408612847328, -0.024854937568306923, -0.009671076200902462, -0.015752747654914856, -0.020628919824957848, 0.028498521074652672, 0.015346399508416653, -0.0091157341375947, -0.0043783970177173615, -0.0001617983652977273, 0.0019420036114752293, 0.002756392117589712, 
-0.03220983222126961, 0.011398054659366608, 0.017134329304099083, 0.0114725511521101, -0.010273825377225876, -0.01579338312149048, 0.02209177240729332, -0.011431916616857052, 0.023554624989628792, -0.024827847257256508, -0.0003678293724078685, 0.009908112697303295, 0.02079145982861519, -0.005133526399731636, 0.0029240106232464314, 0.019342152401804924, 0.02424541488289833, -0.01172990445047617, 0.00494051119312644, 0.0010226417798548937, -0.026250064373016357, 0.021495794877409935, -0.002822423819452524, -0.04025551676750183, -0.03638166934251785, 0.006846959702670574, -0.030421901494264603, -0.005543260369449854, 0.01839400827884674, 0.0016897293971851468, 0.0295279361307621, 0.0051030502654612064, 0.004828765522688627, 0.008512984961271286, -0.019843315705657005, -0.013639739714562893, -0.003147501964122057, -0.007849283516407013, 0.020249662920832634, -0.008980285376310349, -0.026371968910098076, -0.008729703724384308, -0.0002124860038748011, -0.010747897438704967, -0.007429391145706177, 0.022511664777994156, -0.035352252423763275, 0.011743449606001377, 0.01005033403635025, -0.012630642391741276, -0.01468269806355238, 0.0012622176436707377, 0.02049347199499607, -1.540999801363796e-05, -0.016660258173942566, -0.011201652698218822, -4.653422365663573e-05, -0.016944700852036476, -0.000882113236002624, 0.0042023127898573875, -0.023364994674921036, 0.0025312078651040792, 0.0016440151957795024, -0.017337502911686897, 0.005231727380305529, 0.026033345609903336, 0.013517835177481174, 0.005512784235179424, 0.020886274054646492, -0.0007551295566372573, -0.0042903549037873745, -0.0008575630490668118, -0.00337268621660769, -0.01021964568644762, -0.020398655906319618, 0.00800505094230175, -0.05818899720907211, 0.025735357776284218, 0.009183458983898163, -0.028633970767259598, -0.0027868682518601418, 0.0038806209340691566, 0.013185984455049038, -0.026818951591849327, -0.02385261282324791, -0.010497316718101501, -0.05152489244937897, 0.01789284497499466, -0.014005452394485474, -0.004622205626219511, -0.010734353214502335, 0.007673199754208326, 0.009481447748839855, 0.004845696967095137, -0.0007157646468840539, -0.0071584926918148994, 0.003951731603592634, -0.00022073995205573738, 0.014520158991217613, 0.006335638463497162, -0.0032964961137622595, -0.003531839232891798, -0.03248072788119316, 0.008783883415162563, -0.011533503420650959, -0.018272103741765022, -0.008912560530006886, -0.007910235784947872, 0.015292219817638397, -0.0035860189236700535, -0.009759118780493736, 0.024827847257256508, 0.012325881980359554, 0.028823599219322205, -0.00954239908605814, 0.07617665827274323, -0.031613852828741074, -0.021793784573674202, 0.014926507137715816, -0.010829167440533638, 0.0007678279071114957, 0.005763365421444178, -0.00800505094230175, 0.0013739633141085505, 0.01970786601305008, 0.002937555545940995, 0.026209428906440735, 0.02049347199499607, -0.03754653036594391, -0.021306166425347328, -0.006674261763691902, -0.0015585129149258137, 0.0070298160426318645, -0.013761643320322037, -0.014032541774213314, 0.01567147858440876, 0.02226785570383072, -0.013551697134971619, 0.011831492185592651, -0.014046086929738522, 0.01479105744510889, -0.032670360058546066, -0.00866197980940342, 0.024096421897411346, 0.003829827532172203, -0.009833615273237228, -0.014018997550010681, -0.01206852775067091, 0.007551295682787895, 0.027414927259087563, 0.023134730756282806, -0.007564840372651815, -0.004317444749176502, 0.018082475289702415, -0.0035250666551291943, -0.004862627945840359, 0.019464056938886642, 0.005719344597309828, 
-0.007693517487496138, 0.012386833317577839, 0.035921141505241394, 0.010097741149365902, 0.02232203632593155, 0.01141159888356924, -0.001872585853561759, 0.017337502911686897, 0.01670089177787304, 0.004239561501890421, 0.005164002534002066, 0.0029189311899244785, 0.02164478972554207, 0.005465377122163773, -0.014046086929738522, 0.007652882486581802, 0.026222974061965942, 0.015048411674797535, 0.016836341470479965, 0.0026226360350847244, 0.009603351354598999, -0.014831692911684513, -0.04253106564283371, 0.015400579199194908, -0.018217923119664192, -0.030286451801657677, 0.00532315531745553, 0.009332452900707722, 0.01306407991796732, 0.010625993832945824, 0.0006065586931072176, 0.014493069611489773, -0.02844434231519699, 0.01634872332215309, -0.008472350426018238, -0.01670089177787304, -0.02073727920651436, 0.03388940170407295, 0.01675507239997387, 0.011784084141254425, 0.05057675018906593, -0.03516262397170067, 0.007998278364539146, 0.034675005823373795, 0.02874233014881611, -0.005367176607251167, -0.011865354143083096, -0.0023280340246856213, 0.006741986144334078, -0.01376841589808464, -0.0023060233797878027, -0.005272361915558577, 0.0029138519894331694, -0.014587883837521076, -0.01668734662234783, 0.02249811962246895, -0.004998077172785997, 0.04952024668455124, 0.015481849201023579, -0.002592159900814295, 0.007530978415161371, -0.018068930134177208, 0.004564639646559954, 0.01907125487923622, -0.002937555545940995, -0.012732229195535183, -0.011228743009269238, 0.00610537501052022, -0.0064338394440710545, 0.019206702709197998, -0.05092891678214073, -0.009589807130396366, 0.00818113423883915, -0.007876373827457428, 0.02385261282324791, -0.0008232774562202394, -0.0026073979679495096, 0.012590007856488228, 0.018949350342154503, 0.0219698678702116, 0.003907710779458284, -0.03077406994998455, -0.0021045426838099957, 0.017527133226394653, 0.001386661664582789, -0.015576663427054882, -0.03283289819955826, 0.019897494465112686, -0.018583636730909348, -0.040797311812639236, -0.014750422909855843, 0.008160817436873913, -0.012217522598803043, -0.0008249706006608903, -0.01703951507806778, 0.025288375094532967, -0.00039428428863175213, 0.001479782979004085, 0.006620082072913647, -0.02253875508904457, -0.018299194052815437, -0.009461130015552044, -0.005807386711239815, -0.024096421897411346, 0.0017676126444712281, -0.02941957674920559], metadata={'file_path': '/content/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-26', 'last_modified_date': '2024-04-26', 'document_title': 'Journeys in Writing, Programming, and Art: Exploring the Evolution of Artificial Intelligence and the Intersection of Technology and Creativity'}, excluded_embed_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], excluded_llm_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], relationships={: RelatedNodeInfo(node_id='244aec5e-98e0-48d1-81fd-9c12c2fe4c5c', node_type=, metadata={'file_path': '/content/data/paul_graham/paul_graham_essay.txt', 'file_name': 'paul_graham_essay.txt', 'file_type': 'text/plain', 'file_size': 75042, 'creation_date': '2024-04-26', 'last_modified_date': '2024-04-26'}, hash='952e9dc1a243648316292b0771f0f024a059072e500f7da0092671800767f543'), : RelatedNodeInfo(node_id='6107b60b-a45c-44f4-a3ea-e146d0151f47', node_type=, metadata={}, 
hash='d3386e1e52a73d6920911fc30d0592648217874646641b9e7c64ef1c1f4cc82b')}, text='What I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. 
What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach', start_char_idx=2, end_char_idx=4473, text_template='[Excerpt from document]\\n{metadata_str}\\nExcerpt:\\n-----\\n{content}\\n-----\\n', metadata_template='{key}: {value}', metadata_seperator='\\n')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Cache" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 5/5 [00:01<00:00, 4.76it/s]\n" + ] + } + ], + "source": [ + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " ]\n", + ")\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# save and load\n", + "pipeline.cache.persist(\"./llama_cache.json\")\n", + "new_cache = IngestionCache.from_persist_path(\"./llama_cache.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "new_pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " ],\n", + " cache=new_cache,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Now it will run instantly due to the cache\n", + "\n", + "This is very useful when extracting metadata and creating embeddings, since those steps can be expensive to recompute." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "nodes = new_pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's add embeddings. You will observe that node parsing and title extraction are loaded from the cache, while the OpenAI embeddings are computed fresh." 
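+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As a quick optional check, the sketch below (standard library only; exact timings will vary by machine) times a repeat of the cached run to confirm the cache hit:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "\n", + "# Re-run the cached pipeline and time it; a cache hit should be near-instant.\n", + "start = time.perf_counter()\n", + "_ = new_pipeline.run(documents=documents)\n", + "print(f\"Cached run took {time.perf_counter() - start:.2f}s\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With the cache confirmed, here is the same pipeline with an embedding step added:" 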
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " OpenAIEmbedding(),\n", + " ],\n", + " cache=new_cache,\n", + ")\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# save and load\n", + "pipeline.cache.persist(\"./nodes_embedding.json\")\n", + "nodes_embedding_cache = IngestionCache.from_persist_path(\n", + " \"./nodes_embedding.json\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " OpenAIEmbedding(),\n", + " ],\n", + " cache=nodes_embedding_cache,\n", + ")\n", + "\n", + "# Will load from the cache, since the transformations are the same.\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'What I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. 
Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. 
All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0].text" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## RAG using Ingestion Pipeline" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import qdrant_client\n", + "\n", + "from llama_index.vector_stores.qdrant import QdrantVectorStore\n", + "\n", + "client = qdrant_client.QdrantClient(location=\":memory:\")\n", + "vector_store = QdrantVectorStore(\n", + " client=client, collection_name=\"llama_index_vector_store\"\n", + ")\n", + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TitleExtractor(),\n", + " OpenAIEmbedding(),\n", + " ],\n", + " cache=nodes_embedding_cache,\n", + " vector_store=vector_store,\n", + ")\n", + "# Ingest directly into a vector db\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex\n", + "\n", + "index = VectorStoreIndex.from_vector_store(vector_store)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_engine = index.as_query_engine()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham skipped a step in the evolution of computers and went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting to him.\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What did paul graham do growing up?\")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Custom Transformations\n", + "\n", + "Implementing custom transformations is pretty easy.\n", + "\n", + "Let's include a transformation that removes special characters from the text before generating embeddings.\n", + "\n", + "The primary requirement for transformations is that they should take a list of nodes as input and return a modified list of nodes." 
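+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As a minimal sketch of that contract, here is a toy transformation that only annotates each node with a word count (the class name and the `word_count` metadata key are illustrative inventions, not part of the library):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.schema import TransformComponent\n", + "\n", + "\n", + "class WordCountAnnotator(TransformComponent):\n", + " # Toy example: take a list of nodes, tag each one, return the list.\n", + " def __call__(self, nodes, **kwargs):\n", + " for node in nodes:\n", + " node.metadata[\"word_count\"] = len(node.text.split())\n", + " return nodes" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `TextCleaner` below follows the same contract, stripping special characters before the embedding step:" 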
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.schema import TransformComponent\n", + "import re\n", + "\n", + "\n", + "class TextCleaner(TransformComponent):\n", + " def __call__(self, nodes, **kwargs):\n", + " for node in nodes:\n", + " node.text = re.sub(r\"[^0-9A-Za-z ]\", \"\", node.text)\n", + " return nodes\n", + "\n", + "\n", + "pipeline = IngestionPipeline(\n", + " transformations=[\n", + " TokenTextSplitter(chunk_size=1024, chunk_overlap=100),\n", + " TextCleaner(),\n", + " OpenAIEmbedding(),\n", + " ],\n", + " cache=nodes_embedding_cache,\n", + ")\n", + "\n", + "nodes = pipeline.run(documents=documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'What I Worked OnFebruary 2021Before college the two main things I worked on outside of school were writing and programming I didnt write essays I wrote what beginning writers were supposed to write then and probably still are short stories My stories were awful They had hardly any plot just characters with strong feelings which I imagined made them deepThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called data processing This was in 9th grade so I was 13 or 14 The school districts 1401 happened to be in the basement of our junior high school and my friend Rich Draves and I got permission to use it It was like a mini Bond villains lair down there with all these alienlooking machines CPU disk drives printer card reader sitting up on a raised floor under bright fluorescent lightsThe language we used was an early version of Fortran You had to type programs on punch cards then stack them in the card reader and press a button to load the program into memory and run it The result would ordinarily be to print something on the spectacularly loud printerI was puzzled by the 1401 I couldnt figure out what to do with it And in retrospect theres not much I could have done with it The only form of input to programs was data stored on punched cards and I didnt have any data stored on punched cards The only other option was to do things that didnt rely on any input like calculate approximations of pi but I didnt know enough math to do anything interesting of that type So Im not surprised I cant remember any programs I wrote because they cant have done much My clearest memory is of the moment I learned it was possible for programs not to terminate when one of mine didnt On a machine without timesharing this was a social as well as a technical error as the data center managers expression made clearWith microcomputers everything changed Now you could have a computer sitting right in front of you on a desk that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping 1The first of my friends to get a microcomputer built it himself It was sold as a kit by Heathkit I remember vividly how impressed and envious I felt watching him sitting in front of it typing programs right into the computerComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one a TRS80 in about 1980 The gold standard then was the Apple II but a TRS80 was good enough This was when I really started programming I wrote simple games a program to predict how 
high my model rockets would fly and a word processor that my father used to write at least one book There was only room in memory for about 2 pages of text so hed write 2 pages at a time and then print them out but it was a lot better than a typewriterThough I liked programming I didnt plan to study it in college In college I was going to study philosophy which sounded much more powerful It seemed to my naive high school self to be the study of the ultimate truths compared to which the things studied in other fields would be mere domain knowledge What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasnt much left for these supposed ultimate truths All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignoredI couldnt have put this into words when I was 18 All I knew at the time was that I kept taking philosophy courses and they kept being boring So I decided to switch to AIAI was in the air in the mid 1980s but there were two things especially that made me want to work on it a novel by Heinlein called The Moon is a Harsh Mistress which featured an intelligent computer called Mike and a PBS documentary that showed Terry Winograd using SHRDLU I havent tried rereading The Moon is a Harsh Mistress so I dont know how well it has aged but when I read it I was drawn entirely into its world It seemed only a matter of time before wed have Mike and when I saw Winograd using SHRDLU it seemed like that time would be a few years at most All you had to do was teach SHRDLU more wordsThere werent any classes in AI at Cornell then not even graduate classes so I started trying to teach'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nodes[0].text" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-4/Metadata_Extraction.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-4/Metadata_Extraction.ipynb new file mode 100644 index 0000000000000..9f8aee286c693 --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-4/Metadata_Extraction.ipynb @@ -0,0 +1,2551 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Metadata Extraction\n", + "\n", + "In this notebook we will demonstrate the following:\n", + "\n", + "1. RAG using Metadata Extractors.\n", + "2. Metadata extraction using PydanticProgram." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index\n", + "!pip install llama_index-readers-web" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()\n", + "\n", + "import os" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup API Key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Define LLM" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core.schema import MetadataMode\n", + "from llama_index.core import Settings" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "llm = OpenAI(temperature=0.1, model=\"gpt-3.5-turbo\", max_tokens=512)\n", + "Settings.llm = llm" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Node Parser and Metadata Extractors" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.node_parser import TokenTextSplitter\n", + "from llama_index.core.extractors import (\n", + " QuestionsAnsweredExtractor,\n", + ")\n", + "\n", + "node_parser = TokenTextSplitter(\n", + " separator=\" \", chunk_size=256, chunk_overlap=128\n", + ")\n", + "\n", + "question_extractor = QuestionsAnsweredExtractor(\n", + " questions=3, llm=llm, metadata_mode=MetadataMode.EMBED\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.readers.web import SimpleWebPageReader\n", + "\n", + "reader = SimpleWebPageReader(html_to_text=True)\n", + "docs = reader.load_data(urls=[\"https://eugeneyan.com/writing/llm-patterns/\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# [eugeneyan](/)\n", + "\n", + " * [Start Here](/start-here/ \"Start Here\")\n", + " * [Writing](/writing/ \"Writing\")\n", + " * [Speaking](/speaking/ \"Speaking\")\n", + " * [Prototyping](/prototyping/ \"Prototyping\")\n", + " * [About](/about/ \"About\")\n", + "\n", + "# Patterns for Building LLM-based Systems & Products\n", + "\n", + "[ [llm](/tag/llm/) [engineering](/tag/engineering/)\n", + "[production](/tag/production/) [🔥](/tag/🔥/) ] · 66 min read\n", + "\n", + "> Discussions on [HackerNews](https://news.ycombinator.com/item?id=36965993),\n", + "> [Twitter](https://twitter.com/eugeneyan/status/1686531758701899776), and\n", + "> [LinkedIn](https://www.linkedin.com/posts/eugeneyan_patterns-for-building-\n", + "> llm-based-systems-activity-7092300473981927424-_wVo)\n", + "\n", + "“There is a large class of problems that are easy to imagine and build demos\n", + "for, but extremely hard to make products out of. 
For example, self-driving:\n", + "It’s easy to demo a car self-driving around a block, but making it into a\n", + "product takes a decade.” -\n", + "[Karpathy](https://twitter.com/eugeneyan/status/1672692174704766976)\n", + "\n", + "This write-up is about practical patterns for integrating large language\n", + "models (LLMs) into systems & products. We’ll build on academic research,\n", + "industry resources, and practitioner know-how, and distill them into key ideas\n", + "and practices.\n", + "\n", + "There are seven key patterns. They’re also organized along the spectrum of\n", + "improving performance vs. reducing cost/risk, and closer to the data vs.\n", + "closer to the user.\n", + "\n", + " * Evals: To measure performance\n", + " * RAG: To add recent, external knowledge\n", + " * Fine-tuning: To get better at specific tasks\n", + " * Caching: To reduce latency & cost\n", + " * Guardrails: To ensure output quality\n", + " * Defensive UX: To anticipate & manage errors gracefully\n", + " * Collect user feedback: To build our data flywheel\n", + "\n", + "(Also see this addendum on [how to match these LLM patterns to potential\n", + "problems](/writing/llm-problems/).)\n", + "\n", + "![Image](/assets/llm-patterns-og.png)\n", + "\n", + "LLM patterns: From data to user, from defensive to offensive (see connections\n", + "between patterns)\n", + "\n", + "## Evals: To measure performance\n", + "\n", + "Evaluations are a set of measurements used to assess a model’s performance on\n", + "a task. They include benchmark data and metrics. From a [HackerNews\n", + "comment](https://news.ycombinator.com/item?id=36789901):\n", + "\n", + "> How important evals are to the team is a major differentiator between folks\n", + "> rushing out hot garbage and those seriously building products in the space.\n", + "\n", + "### Why evals?\n", + "\n", + "Evals enable us to measure how well our system or product is doing and detect\n", + "any regressions. (A system or product can be made up of multiple components\n", + "such as LLMs, prompt templates, retrieved context, and parameters like\n", + "temperature.) A representative set of evals takes us a step towards measuring\n", + "system changes at scale. Without evals, we would be flying blind, or would\n", + "have to visually inspect LLM outputs with each change.\n", + "\n", + "### More about evals\n", + "\n", + "**There are many benchmarks in the field of language modeling**. Some notable\n", + "ones are:\n", + "\n", + " * **[MMLU](https://arxiv.org/abs/2009.03300)** : A set of 57 tasks that span elementary math, US history, computer science, law, and more. To perform well, models must possess extensive world knowledge and problem-solving ability.\n", + " * **[EleutherAI Eval](https://github.com/EleutherAI/lm-evaluation-harness)** : Unified framework to test models via zero/few-shot settings on 200 tasks. Incorporates a large number of evals including BigBench, MMLU, etc.\n", + " * **[HELM](https://arxiv.org/abs/2211.09110)** : Instead of specific tasks and metrics, HELM offers a comprehensive assessment of LLMs by evaluating them across domains. Metrics include accuracy, calibration, robustness, fairness, bias, toxicity, etc. Tasks include Q&A, information retrieval, summarization, text classification, etc.\n", + " * **[AlpacaEval](https://github.com/tatsu-lab/alpaca_eval)** : Automated evaluation framework which measures how often a strong LLM (e.g., GPT-4) prefers the output of one model over a reference model. 
Metrics include win rate, bias, latency, price, variance, etc. Validated to have high agreement with 20k human annotations.\n", + "\n", + "We can group metrics into two categories: context-dependent or context-free.\n", + "\n", + " * **Context-dependent** : These take context into account. They’re often proposed for a specific task; repurposing them for other tasks will require some adjustment.\n", + " * **Context-free** : These aren’t tied to the context when evaluating generated output; they only compare the output with the provided gold references. As they’re task agnostic, they’re easier to apply to a wide variety of tasks.\n", + "\n", + "To get a better sense of these metrics (and their potential shortfalls), we’ll\n", + "explore a few of the commonly used metrics such as BLEU, ROUGE, BERTScore, and\n", + "MoverScore.\n", + "\n", + "**[BLEU](https://dl.acm.org/doi/10.3115/1073083.1073135) (Bilingual Evaluation\n", + "Understudy)** is a precision-based metric: It counts the number of n-grams in\n", + "the generated output that also show up in the reference, and then divides it\n", + "by the total number of words in the output. It’s predominantly used in machine\n", + "translation and remains a popular metric due to its cost-effectiveness.\n", + "\n", + "First, precision for various values of \\\\(n\\\\) is computed:\n", + "\n", + "\\\\[\\text{precision}_n = \\frac{\\sum_{p \\in \\text{output}} \\sum_{\\text{n-gram}\n", + "\\in p} \\text{Count}_{\\text{clip}} (\\text{n-gram})}{\\sum_{p \\in \\text{output}}\n", + "\\sum_{\\text{n-gram} \\in p} \\text{Count}(\\text{n-gram})}\\\\]\n", + "\n", + "\\\\(Count_{clip}(\\text{n-gram})\\\\) is clipped by the maximum number of times an\n", + "n-gram appears in any corresponding reference sentence.\n", + "\n", + "\\\\[\\text{Count}_{\\text{clip}}(n\\text{-gram}) = \\min \\left(\\text{matched }\n", + "n\\text{-gram count}, \\max_{r \\in R} \\left(n\\text{-gram count in }\n", + "r\\right)\\right)\\\\]\n", + "\n", + "Once we’ve computed precision at various \\\\(n\\\\), a final BLEU-N score is\n", + "computed as the geometric mean of all the \\\\(precision_n\\\\) scores.\n", + "\n", + "However, since precision relies solely on n-grams and doesn’t consider the\n", + "length of the generated output, an output containing just one unigram of a\n", + "common word (like a stop word) would achieve perfect precision. This can be\n", + "misleading and encourage outputs that contain fewer words to increase BLEU\n", + "scores. To counter this, a brevity penalty is added to penalize excessively\n", + "short sentences.\n", + "\n", + "\\\\[BP = \\begin{cases} 1 & \\text{if } |p| > |r| \\\\\\ e^{1-\\frac{|r|}{|p|}} &\n", + "\\text{otherwise} \\end{cases}\\\\]\n", + "\n", + "Thus, the final formula is:\n", + "\n", + "\\\\[\\text{BLEU-N} = BP \\cdot \\exp\\left(\\sum_{n=1}^{N} W_n\n", + "\\log(\\text{precision}_n)\\right)\\\\]\n", + "\n", + "**[ROUGE](https://aclanthology.org/W04-1013/) (Recall-Oriented Understudy for\n", + "Gisting Evaluation)**: In contrast to BLEU, ROUGE is recall-oriented. It\n", + "counts the number of words in the reference that also occur in the output.\n", + "It’s typically used to assess automatic summarization tasks.\n", + "\n", + "There are several ROUGE variants. 
ROUGE-N is most similar to BLEU in that it\n", + "also counts the number of matching n-grams between the output and the\n", + "reference.\n", + "\n", + "\\\\[\\text{ROUGE-N} = \\frac{\\sum_{s_r \\in \\text{references}} \\sum_{n\\text{-gram}\n", + "\\in s_r} \\text{Count}_{\\text{match}} (n\\text{-gram})}{\\sum_{s_r \\in\n", + "\\text{references}} \\sum_{n\\text{-gram} \\in s_r} \\text{Count}\n", + "(n\\text{-gram})}\\\\]\n", + "\n", + "Other variants include:\n", + "\n", + " * ROUGE-L: This measures the longest common subsequence (LCS) between the output and the reference. It considers sentence-level structure similarity and zeros in on the longest series of co-occurring in-sequence n-grams.\n", + " * ROUGE-S: This measures the skip-bigram between the output and reference. Skip-bigrams are pairs of words that maintain their sentence order regardless of the words that might be sandwiched between them.\n", + "\n", + "**[BERTScore](https://arxiv.org/abs/1904.09675)** is an embedding-based metric\n", + "that uses cosine similarity to compare each token or n-gram in the generated\n", + "output with the reference sentence. There are three components to BERTScore:\n", + "\n", + " * Recall: Average cosine similarity between each token in the reference and its closest match in the generated output.\n", + " * Precision: Average cosine similarity between each token in the generated output and its nearest match in the reference.\n", + " * F1: Harmonic mean of recall and precision\n", + "\n", + "\\\\[Recall_{\\text{BERT}} = \\frac{1}{|r|} \\sum_{i \\in r} \\max_{j \\in p}\n", + "\\vec{i}^T \\vec{j}, \\quad Precision_{\\text{BERT}} = \\frac{1}{|p|} \\sum_{j \\in\n", + "p} \\max_{i \\in r} \\vec{i}^T \\vec{j}\\\\] \\\\[\\text{BERTscore} = F_{\\text{BERT}} =\n", + "\\frac{2 \\cdot P_{\\text{BERT}} \\cdot R_{\\text{BERT}}}{P_{\\text{BERT}} +\n", + "R_{\\text{BERT}}}\\\\]\n", + "\n", + "BERTScore is useful because it can account for synonyms and paraphrasing.\n", + "Simpler metrics like BLEU and ROUGE can’t do this due to their reliance on\n", + "exact matches. BERTScore has been shown to have better correlation for tasks\n", + "such as image captioning and machine translation.\n", + "\n", + "**[MoverScore](https://arxiv.org/abs/1909.02622)** also uses contextualized\n", + "embeddings to compute the distance between tokens in the generated output and\n", + "reference. But unlike BERTScore, which is based on one-to-one matching (or\n", + "“hard alignment”) of tokens, MoverScore allows for many-to-one matching (or\n", + "“soft alignment”).\n", + "\n", + "![BERTScore \\(left\\) vs. MoverScore \\(right\\)](/assets/mover-score.jpg)\n", + "\n", + "BERTScore (left) vs. MoverScore (right;\n", + "[source](https://arxiv.org/abs/1909.02622))\n", + "\n", + "MoverScore enables the mapping of semantically related words in one sequence\n", + "to their counterparts in another sequence. It does this by solving a\n", + "constrained optimization problem that finds the minimum effort to transform\n", + "one text into another. The idea is to measure the distance that words would\n", + "have to move to convert one sequence to another.\n", + "\n", + "However, there are several pitfalls to using these conventional benchmarks and\n", + "metrics.\n", + "\n", + "First, there’s **poor correlation between these metrics and human judgments.**\n", + "BLEU, ROUGE, and others have had [negative correlation with how humans\n", + "evaluate fluency](https://arxiv.org/abs/2008.12009). 
They also showed moderate\n", + "to less correlation with human adequacy scores. In particular, BLEU and ROUGE\n", + "have [low correlation with tasks that require creativity and\n", + "diversity](https://arxiv.org/abs/2303.16634).\n", + "\n", + "Second, these metrics often have **poor adaptability to a wider variety of\n", + "tasks**. Adopting a metric proposed for one task to another is not always\n", + "prudent. For example, exact match metrics such as BLEU and ROUGE are a poor\n", + "fit for tasks like abstractive summarization or dialogue. Since they’re based\n", + "on n-gram overlap between output and reference, they don’t make sense for a\n", + "dialogue task where a wide variety of responses are possible. An output can\n", + "have zero n-gram overlap with the reference but yet be a good response.\n", + "\n", + "Third, these metrics have **poor reproducibility**. Even for the same metric,\n", + "[high variance is reported across different\n", + "studies](https://arxiv.org/abs/2008.12009), possibly due to variations in\n", + "human judgment collection or metric parameter settings. Another study of\n", + "[ROUGE scores](https://aclanthology.org/2023.acl-long.107/) across 2,000\n", + "studies found that scores were hard to reproduce, difficult to compare, and\n", + "often incorrect because evals were often conducted with untested, incorrect\n", + "ROUGE implementations.\n", + "\n", + "![Dimensions of model evaluations with ROUGE](/assets/rogue-scores.jpg)\n", + "\n", + "Dimensions of model evaluations with ROUGE\n", + "([source](https://aclanthology.org/2023.acl-long.107/))\n", + "\n", + "And even with recent benchmarks such as MMLU, **the same model can get\n", + "significantly different scores based on the eval implementation**.\n", + "[Huggingface compared the original MMLU\n", + "implementation](https://huggingface.co/blog/evaluating-mmlu-leaderboard) with\n", + "the HELM and EleutherAI implementations and found that the same example could\n", + "have different prompts across various providers.\n", + "\n", + "![Different prompts for the same question across MMLU\n", + "implementations](/assets/mmlu-prompt.jpg)\n", + "\n", + "Different prompts for the same question across MMLU implementations\n", + "([source](https://huggingface.co/blog/evaluating-mmlu-leaderboard))\n", + "\n", + "Furthermore, the evaluation approach differed across all three benchmarks:\n", + "\n", + " * Original MMLU: Compares predicted probabilities on the answers only (A, B, C, D)\n", + " * HELM: Uses the next token probabilities from the model and picks the token with the highest probability, even if it’s _not_ one of the options.\n", + " * EleutherAI: Computes probability of the full answer sequence (i.e., a letter followed by the answer text) for each answer. Then, pick answer with highest probability.\n", + "\n", + "![Different eval for the same question across MMLU\n", + "implementations](/assets/mmlu-eval.jpg)\n", + "\n", + "Different eval for the same question across MMLU implementations\n", + "([source](https://huggingface.co/blog/evaluating-mmlu-leaderboard))\n", + "\n", + "As a result, even for the same eval, both absolute scores and model ranking\n", + "can fluctuate widely depending on eval implementation. This means that model\n", + "metrics aren’t truly comparable—even for the same eval—unless the eval’s\n", + "implementation is identical down to minute details like prompts and\n", + "tokenization. 
Beyond conventional evals such as those mentioned above, **an emerging trend is to use a strong LLM as a reference-free metric** to evaluate generations from other LLMs. This means we may not need human judgments or gold references for evaluation.

**[G-Eval](https://arxiv.org/abs/2303.16634) is a framework that applies LLMs** with Chain-of-Thought (CoT) and a form-filling paradigm to **evaluate LLM outputs**. First, they provide a task introduction and evaluation criteria to an LLM and ask it to generate a CoT of evaluation steps. Then, to evaluate coherence in news summarization, they concatenate the prompt, CoT, news article, and summary and ask the LLM to output a score between 1 and 5. Finally, they use the probabilities of the output tokens from the LLM to normalize the score and take their weighted summation as the final result.

![Overview of G-Eval](/assets/geval.jpg)

Overview of G-Eval ([source](https://arxiv.org/abs/2303.16634))

They found that GPT-4 as an evaluator had a high Spearman correlation with human judgments (0.514), outperforming all previous methods. It also outperformed traditional metrics on aspects such as coherence, consistency, fluency, and relevance. On Topical-Chat, it did better than traditional metrics such as ROUGE-L, BLEU-4, and BERTScore across several criteria such as naturalness, coherence, engagingness, and groundedness.

**The [Vicuna](https://arxiv.org/abs/2306.05685) paper adopted a similar approach.** They start by defining eight categories (writing, roleplay, extraction, reasoning, math, coding, STEM, and humanities/social science) before developing 10 questions for each category. Next, they generated answers from five chatbots: LLaMA, Alpaca, ChatGPT, Bard, and Vicuna. Finally, they asked GPT-4 to rate the quality of the answers based on helpfulness, relevance, accuracy, and detail.

Overall, they found that GPT-4 not only provided consistent scores but could also give detailed explanations for those scores. Under the single-answer grading paradigm, GPT-4 had higher agreement with humans (85%) than the humans had amongst themselves (81%). This suggests that GPT-4's judgment aligns closely with that of human evaluators.

**[QLoRA](https://arxiv.org/abs/2305.14314) also used an LLM to evaluate another LLM's output.** They asked GPT-4 to rate the performance of various models against gpt-3.5-turbo on the Vicuna benchmark. Given the responses from gpt-3.5-turbo and another model, GPT-4 was prompted to score both out of 10 and explain its ratings. They also measured performance via direct comparisons between models, simplifying the task to a three-class rating scheme that included ties.

To validate the automated evaluation, they collected human judgments on the Vicuna benchmark. Using Mechanical Turk, they enlisted two annotators for comparisons to gpt-3.5-turbo, and three annotators for pairwise comparisons. They found that human and GPT-4 rankings of models largely agreed, with a Spearman rank correlation of 0.55 at the model level. This provides an additional data point suggesting that LLM-based automated evals could be a cost-effective and reasonable alternative to human evals.
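As a concrete example of G-Eval's final scoring step, here's a minimal sketch of the probability-weighted summation described above; the token probabilities are hypothetical.

```python
# Hypothetical probabilities the evaluator LLM assigns to each score token (1-5).
# Instead of taking the single sampled score, G-Eval weights each score by its
# token probability, yielding a finer-grained, continuous final score.
score_probs = {1: 0.02, 2: 0.08, 3: 0.25, 4: 0.45, 5: 0.20}
g_eval_score = sum(score * p for score, p in score_probs.items())
print(round(g_eval_score, 2))  # 3.73
```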
### How to apply evals?

**Building solid evals should be the starting point** for any LLM-based system or product (as well as conventional machine learning systems).

Unfortunately, classical metrics such as BLEU and ROUGE don't make sense for more complex tasks such as abstractive summarization or dialogue. Furthermore, we've seen that benchmarks like MMLU (and metrics like ROUGE) are sensitive to how they're implemented and measured. And to be candid, unless your LLM system is studying for a school exam, using MMLU as an eval [doesn't quite make sense](https://twitter.com/Tim_Dettmers/status/1680782418335367169).

Thus, instead of using off-the-shelf benchmarks, we can **start by collecting a set of task-specific evals** (i.e., prompt, context, expected outputs as references). These evals will then guide prompt engineering, model selection, fine-tuning, and so on. And as we update our systems, we can run these evals to quickly measure improvements or regressions. Think of it as Eval Driven Development (EDD).

In addition to the evaluation dataset, we **also need useful metrics**. They help us distill performance changes into a single number that's comparable across eval runs. And if we can simplify the problem, we can choose metrics that are easier to compute and interpret.

The simplest task is probably classification: If we're using an LLM for classification-like tasks (e.g., toxicity detection, document categorization) or extractive QA without dialogue, we can rely on standard classification metrics such as recall, precision, PRAUC, etc. If our task has no single correct answer but we have references (e.g., machine translation, extractive summarization), we can rely on reference metrics based on matching (BLEU, ROUGE) or semantic similarity (BERTScore, MoverScore).

However, these metrics may not work for more open-ended tasks such as abstractive summarization, dialogue, and others. But collecting human judgments can be slow and expensive. Thus, we may opt to lean on **automated evaluations via a strong LLM**.

Relative to human judgments, which are typically noisy (due to differing biases among annotators), LLM judgments tend to be less noisy (as the bias is more systematic) but more biased. Nonetheless, since we're aware of these biases, we can mitigate them accordingly:

 * Position bias: LLMs tend to favor the response in the first position. To mitigate this, we can evaluate the same pair of responses twice while swapping their order. If the same response is preferred in both orders, we mark it as a win; else, it's a tie (see the sketch after this list).
 * Verbosity bias: LLMs tend to favor longer, wordier responses over more concise ones, even if the latter is clearer and of higher quality. A possible solution is to ensure that comparison responses are similar in length.
 * Self-enhancement bias: LLMs have a slight bias towards their own answers. [GPT-4 favors itself with a 10% higher win rate while Claude-v1 favors itself with a 25% higher win rate.](https://arxiv.org/abs/2306.05685) To counter this, don't use the same LLM for evaluation tasks.
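Here's a minimal sketch of the order-swapping mitigation for position bias; `llm_judge` is a hypothetical function that returns `"first"` or `"second"` for whichever response the judge LLM prefers in the order shown.

```python
def debiased_compare(prompt, resp_a, resp_b, llm_judge):
    """Evaluate the same pair twice with the order swapped; require agreement."""
    verdict_ab = llm_judge(prompt, first=resp_a, second=resp_b)
    verdict_ba = llm_judge(prompt, first=resp_b, second=resp_a)
    if verdict_ab == "first" and verdict_ba == "second":
        return "A wins"
    if verdict_ab == "second" and verdict_ba == "first":
        return "B wins"
    return "tie"  # judge flipped with the order -> position bias, count it as a tie
```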
Another tip: Rather than asking an LLM for a direct evaluation (via giving a score), try giving it a reference and asking for a comparison. This helps with reducing noise.

Finally, sometimes the best eval is human eval, aka vibe check. (Not to be confused with the poorly named code evaluation benchmark [HumanEval](https://arxiv.org/abs/2107.03374).) As mentioned in the [Latent Space podcast with MosaicML](https://www.latent.space/p/mosaic-mpt-7b#details) (34th minute):

> The vibe-based eval cannot be underrated. … One of our evals was just having a bunch of prompts and watching the answers as the models trained and see if they change. Honestly, I don't really believe that any of these eval metrics capture what we care about. One of our prompts was "suggest games for a 3-year-old and a 7-year-old to play" and that was a lot more valuable to see how the answer changed during the course of training. — Jonathan Frankle

Also see this [deep dive into evals](/writing/abstractive/) for abstractive summarization. It covers reference, context, and preference-based metrics, and also discusses hallucination detection.

## Retrieval-Augmented Generation: To add knowledge

Retrieval-Augmented Generation (RAG) fetches relevant data from outside the foundation model and enhances the input with this data, providing richer context to improve output.

### Why RAG?

RAG helps reduce hallucination by grounding the model on the retrieved context, thus increasing factuality. In addition, it's cheaper to keep retrieval indices up-to-date than to continuously pre-train an LLM. This cost efficiency makes it easier to provide LLMs with access to recent data via RAG. Finally, if we need to update or remove data such as biased or toxic documents, it's more straightforward to update the retrieval index (compared to fine-tuning or prompting an LLM not to generate toxic outputs).

In short, RAG applies mature and simpler ideas from the field of information retrieval to support LLM generation. In a [recent Sequoia survey](https://www.sequoiacap.com/article/llm-stack-perspective/), 88% of respondents believe that retrieval will be a key component of their stack.

### More about RAG

Before diving into RAG, it helps to have a basic understanding of text embeddings. (Feel free to skip this section if you're familiar with the subject.)

A text embedding is a **compressed, abstract representation of text data** where text of arbitrary length can be represented as a fixed-size vector of numbers. It's usually learned from a corpus of text such as Wikipedia. Think of them as a universal encoding for text, where **similar items are close to each other while dissimilar items are farther apart**.
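To see this in code, here's a small sketch using `sentence-transformers`; the model choice is an assumption, and any sentence encoder would work.

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("all-MiniLM-L6-v2")
texts = [
    "How do I bake a loaf of bread?",
    "Steps for baking bread at home",
    "Updating NVIDIA GPU drivers on Linux",
]
embeddings = model.encode(texts, normalize_embeddings=True)

print(util.cos_sim(embeddings[0], embeddings[1]))  # high: similar items are close
print(util.cos_sim(embeddings[0], embeddings[2]))  # low: dissimilar items are far apart
```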
A good embedding is one that does well on a downstream task, such as retrieving similar items. Huggingface's [Massive Text Embedding Benchmark (MTEB)](https://huggingface.co/spaces/mteb/leaderboard) scores various models on diverse tasks such as classification, clustering, retrieval, summarization, etc.

Quick note: While we mainly discuss text embeddings here, embeddings can take many modalities. For example, [CLIP](https://arxiv.org/abs/2103.00020) is multimodal and embeds images and text in the same space, allowing us to find images most similar to an input text. We can also [embed products based on user behavior](/writing/search-query-matching/#supervised-techniques-improves-modeling-of-our-desired-event) (e.g., clicks, purchases) or [graph relationships](/writing/search-query-matching/#self-supervised-techniques-no-need-for-labels).

**RAG has its roots in open-domain Q&A.** An early [Meta paper](https://arxiv.org/abs/2005.04611) showed that retrieving relevant documents via TF-IDF and providing them as context to a language model (BERT) improved performance on an open-domain QA task. They converted each task into a cloze statement and queried the language model for the missing token.

Following that, **[Dense Passage Retrieval (DPR)](https://arxiv.org/abs/2004.04906)** showed that using dense embeddings (instead of a sparse vector space such as TF-IDF) for document retrieval can outperform strong baselines like Lucene BM25 (65.2% vs. 42.9% top-5 accuracy). They also showed that higher retrieval precision translates to higher end-to-end QA accuracy, highlighting the importance of upstream retrieval.

To learn the DPR embedding, they fine-tuned two independent BERT-based encoders on existing question-answer pairs. The passage encoder (\\(E_p\\)) embeds text passages into vectors while the query encoder (\\(E_q\\)) embeds questions into vectors. The query embedding is then used to retrieve the \\(k\\) passages that are most similar to the question.

They trained the encoders so that the dot-product similarity makes a good ranking function, and optimized the loss function as the negative log-likelihood of the positive passage. The DPR embeddings are optimized for maximum inner product between the question and relevant passage vectors. The goal is to learn a vector space such that pairs of questions and their relevant passages are close together.

For inference, they embed all passages (via \\(E_p\\)) and index them in FAISS offline. Then, given a question at query time, they compute the question embedding (via \\(E_q\\)), retrieve the top \\(k\\) passages via approximate nearest neighbors, and provide them to the language model (BERT), which outputs the answer to the question.
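Here's a minimal sketch of this embed-index-retrieve flow with FAISS; for simplicity it uses a single off-the-shelf sentence encoder for both passages and queries instead of DPR's two trained encoders.

```python
import faiss
from sentence_transformers import SentenceTransformer

passages = [
    "The Eiffel Tower is located in Paris, France.",
    "Python was created by Guido van Rossum.",
]
encoder = SentenceTransformer("all-MiniLM-L6-v2")

# Offline: embed all passages (the role of E_p) and index them in FAISS.
p_emb = encoder.encode(passages, normalize_embeddings=True)
index = faiss.IndexFlatIP(p_emb.shape[1])  # maximum inner product search
index.add(p_emb)

# Query time: embed the question (the role of E_q) and retrieve the top-k passages.
q_emb = encoder.encode(["Who created Python?"], normalize_embeddings=True)
scores, ids = index.search(q_emb, k=1)
print(passages[ids[0][0]], scores[0][0])
```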
**[Retrieval Augmented Generation (RAG)](https://arxiv.org/abs/2005.11401)**, from which this pattern gets its name, highlighted the downsides of pre-trained LLMs. These include not being able to expand or revise memory, not providing insights into generated output, and hallucinations.

To address these downsides, they introduced RAG (aka semi-parametric models). Dense vector retrieval serves as the non-parametric component while a pre-trained LLM acts as the parametric component. They reused the DPR encoders to initialize the retriever and build the document index. For the LLM, they used BART, a 400M parameter seq2seq model.

![Overview of Retrieval Augmented Generation](/assets/rag.jpg)

Overview of Retrieval Augmented Generation ([source](https://arxiv.org/abs/2005.11401))

During inference, they concatenate the input with the retrieved document. Then, the LLM generates \\(\text{token}_i\\) based on the original input, the retrieved document, and the previous \\(i-1\\) tokens. For generation, they proposed two approaches that vary in how the retrieved passages are used to generate output.

In the first approach, RAG-Sequence, the model uses the same document to generate the complete sequence. Thus, for \\(k\\) retrieved documents, the generator produces an output for each document. Then, the probability of each output sequence is marginalized (sum the probability of each output sequence across the \\(k\\) documents, weighing each by the probability of that document being retrieved). Finally, the output sequence with the highest probability is selected.

On the other hand, RAG-Token can generate each token based on a _different_ document. Given \\(k\\) retrieved documents, the generator produces a distribution for the next output token for each document before marginalizing (aggregating all the individual token distributions). The process is then repeated for the next token. This means that, for each token generated, it can retrieve a different set of \\(k\\) relevant documents based on the original input _and_ previously generated tokens. Thus, documents can have different retrieval probabilities and contribute differently to the next generated token.

[**Fusion-in-Decoder (FiD)**](https://arxiv.org/abs/2007.01282) also uses retrieval with generative models for open-domain QA. It supports two methods for retrieval, BM25 (Lucene with default parameters) and DPR. FiD is named for how it performs fusion on the retrieved documents in the decoder only.

![Overview of Fusion-in-Decoder](/assets/fid.jpg)

Overview of Fusion-in-Decoder ([source](https://arxiv.org/abs/2007.01282))

For each retrieved passage, the title and passage are concatenated with the question. These pairs are processed independently in the encoder. They also add special tokens such as `question:`, `title:`, and `context:` before their corresponding sections. The decoder attends over the concatenation of these retrieved passages.

Because it processes passages independently in the encoder, it can scale to a large number of passages, as it only needs to do self-attention over one context at a time. Thus, compute grows linearly (instead of quadratically) with the number of retrieved passages, making it more scalable than alternatives such as RAG-Token. Then, during decoding, the decoder processes the encoded passages jointly, allowing it to better aggregate context across multiple retrieved passages.
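A quick sketch of FiD's input formatting with the special markers mentioned above; each string is encoded independently, while the decoder attends over all of them jointly.

```python
def fid_encoder_inputs(question: str, retrieved: list) -> list:
    """Build one encoder input per retrieved (title, passage) pair."""
    return [
        f"question: {question} title: {title} context: {passage}"
        for title, passage in retrieved
    ]

inputs = fid_encoder_inputs(
    "Who wrote Hamlet?",
    [("Hamlet", "Hamlet is a tragedy written by William Shakespeare.")],
)
print(inputs[0])
```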
[**Retrieval-Enhanced Transformer (RETRO)**](https://arxiv.org/abs/2112.04426) adopts a similar pattern where it combines a frozen BERT retriever, a differentiable encoder, and chunked cross-attention to generate output. What's different is that RETRO does retrieval throughout the entire pre-training stage, and not just during inference. Furthermore, they fetch relevant documents based on chunks of the input. This allows for finer-grained, repeated retrieval during generation instead of only retrieving once per query.

For each input chunk (\\(C_u\\)), the \\(k\\) retrieved chunks \\(\text{RET}(C_u)\\) are fed into an encoder. The output is the encoded neighbors \\(E^{j}_{u}\\) where \\(E^{j}_{u} = \text{Encoder}(\text{RET}(C_{u})^{j}, H_{u}) \in \mathbb{R}^{r \times d_{0}}\\). Here, each chunk encoding is conditioned on \\(H_u\\) (the intermediate activations) and the activations of chunk \\(C_u\\) through cross-attention layers. In short, the encoding of the retrieved chunks depends on the attended activation of the input chunk. \\(E^{j}_{u}\\) is then used to condition the generation of the next chunk.

![Overview of RETRO](/assets/retro.jpg)

Overview of RETRO ([source](https://arxiv.org/abs/2112.04426))

During retrieval, RETRO splits the input sequence into chunks of 64 tokens. Then, it finds text similar to the _previous_ chunk to provide context to the _current_ chunk. The retrieval index consists of two contiguous chunks of tokens, \\(N\\) and \\(F\\). The former is the neighbor chunk (64 tokens), which is used to compute the key, while the latter is the continuation chunk (64 tokens) in the original document.

Retrieval is based on approximate \\(k\\)-nearest neighbors via \\(L_2\\) (Euclidean) distance on BERT embeddings. (An interesting departure from the usual cosine or dot-product similarity.) The retrieval index, built on SCaNN, can query a 2T token database in 10ms.

They also demonstrated how to RETRO-fit existing baseline models. By freezing the pre-trained weights and only training the chunked cross-attention and neighbor encoder parameters (< 10% of weights for a 7B model), they can enhance transformers with retrieval while only requiring 6M training sequences (3% of pre-training sequences). RETRO-fitted models were able to surpass the performance of baseline models and achieve performance close to that of RETRO trained from scratch.

![Performance from RETRO-fitting a pre-trained model](/assets/retrofit.jpg)

Performance from RETRO-fitting a pre-trained model ([source](https://arxiv.org/abs/2112.04426))

**[Internet-augmented LMs](https://arxiv.org/abs/2203.05115)** proposes using a humble "off-the-shelf" search engine to augment LLMs. First, they retrieve a set of relevant documents via Google Search. Since these retrieved documents tend to be long (average length 2,056 words), they chunk them into paragraphs of six sentences each. Finally, they embed the question and paragraphs via TF-IDF and apply cosine similarity to rank the most relevant paragraphs for each query.

![Overview of internet-augmented LLMs](/assets/internet-llm.jpg)

Overview of internet-augmented LLMs ([source](https://arxiv.org/abs/2203.05115))
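Here's a small sketch of that TF-IDF ranking step using scikit-learn; the example texts are illustrative.

```python
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

paragraphs = [
    "Paris is the capital and most populous city of France.",
    "The Nile is a major north-flowing river in Africa.",
]
question = "What is the capital of France?"

# Embed the question and paragraphs via TF-IDF, then rank by cosine similarity.
vectorizer = TfidfVectorizer()
matrix = vectorizer.fit_transform(paragraphs + [question])
similarities = cosine_similarity(matrix[-1], matrix[:-1]).ravel()

best = similarities.argmax()
print(paragraphs[best], similarities[best])
```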
The retrieved paragraphs are used to condition the LLM via few-shot prompting. They adopt the conventional \\(k\\)-shot prompting (\\(k=15\\)) from closed-book QA (only providing question-answer pairs) and extend it with an evidence paragraph, such that each context is an evidence, question, and answer triplet.

For the generator, they used Gopher, a 280B parameter model trained on 300B tokens. For each question, they generated four candidate answers based on each of the 50 retrieved paragraphs. Finally, they select the best answer by estimating the answer probability via several methods including direct inference, RAG, noisy channel inference, and Product-of-Experts (PoE). PoE consistently performed the best.

RAG has also been **applied to non-QA tasks such as code generation**. While **[CodeT5+](https://arxiv.org/abs/2305.07922)** can be used as a standalone generator, when combined with RAG, it significantly outperforms similar models in code generation.

To assess the impact of RAG on code generation, they evaluate the model in three settings:

 * Retrieval-based: Fetch the top-1 code sample as the prediction.
 * Generative-only: Output code based on the decoder only.
 * Retrieval-augmented: Append the top-1 code sample to the encoder input before code generation via the decoder.

![Overview of RAG for CodeT5+](/assets/codet5.jpg)

Overview of RAG for CodeT5+ ([source](https://arxiv.org/abs/2305.07922))

As a qualitative example, they showed that retrieved code provides crucial context (e.g., use `urllib3` for an HTTP request) and guides the generative process towards more correct predictions. In contrast, the generative-only approach returns incorrect output that only captures the concepts of "download" and "compress".

**What if we don't have relevance judgments for query-passage pairs?** Without them, we would not be able to train the bi-encoders that embed the queries and documents in the same embedding space where relevance is represented by the inner product. **[Hypothetical document embeddings (HyDE)](https://arxiv.org/abs/2212.10496)** suggests a solution.

![Overview of HyDE](/assets/hyde.jpg)

Overview of HyDE ([source](https://arxiv.org/abs/2212.10496))

Given a query, HyDE first prompts an LLM, such as InstructGPT, to generate a hypothetical document. Then, an unsupervised encoder, such as Contriever, encodes the document into an embedding vector. Finally, the inner product is computed between the _hypothetical_ document and the corpus, and the most similar _real_ documents are retrieved.

The expectation is that the encoder's dense bottleneck serves as a lossy compressor and the extraneous, non-factual details are excluded via the embedding. This reframes the relevance modeling problem from a representation learning task to a generation task.
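Here's a minimal sketch of the HyDE flow; `generate` stands in for a hypothetical call to an instruction-following LLM, while `encoder` and `index` follow the FAISS pattern from the DPR example above.

```python
def hyde_search(query, generate, encoder, index, passages, k=5):
    # Step 1: ask an LLM to write a document that would answer the query.
    hypothetical_doc = generate(f"Write a short passage that answers: {query}")
    # Step 2: embed the hypothetical document with an unsupervised encoder.
    doc_emb = encoder.encode([hypothetical_doc], normalize_embeddings=True)
    # Step 3: retrieve the *real* documents closest to the hypothetical one.
    scores, ids = index.search(doc_emb, k)
    return [passages[i] for i in ids[0]]
```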
### How to apply RAG

From experience with [Obsidian-Copilot](/writing/obsidian-copilot/), I've found that hybrid retrieval (traditional search index + embedding-based search) works better than either alone. There, I complemented classical retrieval (BM25 via OpenSearch) with semantic search (`e5-small-v2`).

Why not embedding-based search only? While it's great in many instances, there are situations where it falls short, such as:

 * Searching for a person or object's name (e.g., Eugene, Kaptir 2.0)
 * Searching for an acronym or phrase (e.g., RAG, RLHF)
 * Searching for an ID (e.g., `gpt-3.5-turbo`, `titan-xlarge-v1.01`)

But keyword search has its limitations too. It only models simple word frequencies and doesn't capture semantic or correlation information. Thus, it doesn't deal well with synonyms or hypernyms (i.e., words that represent a generalization). This is why combining it with semantic search is complementary.

In addition, with a conventional search index, we can use metadata to refine results. For example, we can use date filters to prioritize newer documents or narrow our search to a specific time period. And if the search is related to e-commerce, filters on average rating or categories are helpful. Finally, having metadata is handy for downstream ranking, such as prioritizing documents that are cited more, or boosting products by their sales volume.

**With regard to embeddings**, the seemingly popular approach is to use [`text-embedding-ada-002`](https://openai.com/blog/new-and-improved-embedding-model). Its benefits include ease of use via an API and not having to maintain our own embedding infra or self-host embedding models. Nonetheless, personal experience and anecdotes from others suggest there are better alternatives for retrieval.

The OG embedding approaches include Word2vec and [fastText](https://fasttext.cc). FastText is an open-source, lightweight library that enables users to leverage pre-trained embeddings or train new embedding models. It comes with pre-trained embeddings for 157 languages and is extremely fast, even without a GPU. It's my go-to for early-stage proofs of concept.

Another good baseline is [sentence-transformers](https://github.com/UKPLab/sentence-transformers). It makes it simple to compute embeddings for sentences, paragraphs, and even images. It's based on workhorse transformers such as BERT and RoBERTa and is available in more than 100 languages.

More recently, instructor models have shown SOTA performance. During training, these models prepend the task description to the text. Then, when embedding new text, we simply have to describe the task to get task-specific embeddings. (Not that different from instruction tuning for embedding models IMHO.)

An example is the [E5](https://arxiv.org/abs/2212.03533) family of models. For open QA and information retrieval, we simply prepend documents in the index with `passage:`, and prepend queries with `query:`. If the task is symmetric (e.g., semantic similarity, paraphrase retrieval) or if we want to use embeddings as features (e.g., classification, clustering), we just use the `query:` prefix.
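A short sketch of the E5 prefixing convention via `sentence-transformers`; the model name is real, but the example texts are illustrative.

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("intfloat/e5-small-v2")

# E5 expects a "query: " prefix on queries and a "passage: " prefix on documents.
query = model.encode(["query: how long should I bake bread?"], normalize_embeddings=True)
passage = model.encode(
    ["passage: Bake the loaf at 230°C for about 30 minutes until golden brown."],
    normalize_embeddings=True,
)
print(util.cos_sim(query, passage))
```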
The [Instructor](https://arxiv.org/abs/2212.09741) model takes it a step further, allowing users to customize the prepended prompt: "Represent the `domain` `task_type` for the `task_objective`:". For example, "Represent the Wikipedia document for retrieval:". (The domain and task objective are optional.) This brings the concept of prompt tuning into the field of text embedding.

Finally, as of Aug 1st, the top embedding model on the [MTEB Leaderboard](https://huggingface.co/spaces/mteb/leaderboard) is the [GTE](https://huggingface.co/thenlper/gte-large) family of models by Alibaba DAMO Academy. The top-performing model is half the size of the next-best model, `e5-large-v2` (0.67GB vs. 1.34GB). In 2nd position is `gte-base`, with a model size of only 0.22GB and an embedding dimension of 768. (H/T [Nirant](https://twitter.com/NirantK).)

To retrieve documents with low latency at scale, we use approximate nearest neighbors (ANN). It optimizes for retrieval speed and returns the approximate (instead of exact) top \\(k\\) most similar neighbors, trading off a little accuracy loss for a large speed up.

ANN embedding indices are data structures that let us do ANN searches efficiently. At a high level, they build partitions over the embedding space so we can quickly zoom in on the specific space where the query vector is. Some popular techniques include (see the sketch after this list):

 * [Locality Sensitive Hashing](https://en.wikipedia.org/wiki/Locality-sensitive_hashing) (LSH): The core idea is to create hash functions so that similar items are likely to end up in the same hash bucket. By only needing to check the relevant buckets, we can perform ANN queries efficiently.
 * [Facebook AI Similarity Search](https://github.com/facebookresearch/faiss) (FAISS): It uses a combination of quantization and indexing for efficient retrieval, supports both CPU and GPU, and can handle billions of vectors due to its efficient use of memory.
 * [Hierarchical Navigable Small Worlds](https://github.com/nmslib/hnswlib) (HNSW): Inspired by "six degrees of separation", it builds a hierarchical graph structure that embodies the small-world phenomenon. Here, most nodes can be reached from any other node via a minimum number of hops. This structure allows HNSW to initiate queries from broader, coarser approximations and progressively narrow the search at lower levels.
 * [Scalable Nearest Neighbors](https://github.com/google-research/google-research/tree/master/scann) (ScaNN): It has a two-step process. First, coarse quantization reduces the search space. Then, fine-grained search is done within the reduced set. Best recall/latency trade-off I've seen.
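As an example, here's a minimal HNSW index via `hnswlib` on random vectors; the dimension, corpus size, and parameters (`M`, `ef_construction`, `ef`) are illustrative, not tuned recommendations.

```python
import hnswlib
import numpy as np

dim, n = 384, 10_000
data = np.random.rand(n, dim).astype(np.float32)

index = hnswlib.Index(space="cosine", dim=dim)
index.init_index(max_elements=n, M=16, ef_construction=200)
index.add_items(data, np.arange(n))
index.set_ef(50)  # higher ef -> better recall, slower queries

labels, distances = index.knn_query(data[:1], k=5)  # approximate top-5 neighbors
print(labels, distances)
```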
When evaluating an ANN index, some factors to consider include:

 * Recall: How does it fare against exact nearest neighbors?
 * Latency/throughput: How many queries can it handle per second?
 * Memory footprint: How much RAM is required to serve an index?
 * Ease of adding new items: Can new items be added without having to reindex all documents (LSH), or does the index need to be rebuilt (ScaNN)?

No single framework is better than all others in every aspect. Thus, start by defining your functional and non-functional requirements before benchmarking. Personally, I've found ScaNN to be outstanding in the recall-latency trade-off (see benchmark graph [here](/writing/real-time-recommendations/#how-to-design-and-implement-an-mvp)).

## Fine-tuning: To get better at specific tasks

Fine-tuning is the process of taking a pre-trained model (that has already been trained with a vast amount of data) and further refining it on a specific task. The intent is to harness the knowledge that the model has already acquired during its pre-training and apply it to a specific task, usually involving a smaller, task-specific dataset.

The term "fine-tuning" is used loosely and can refer to several concepts such as:

 * **Continued pre-training**: With domain-specific data, apply the same pre-training regime (next token prediction, masked language modeling) on the base model.
 * **Instruction fine-tuning**: The pre-trained (base) model is fine-tuned on examples of instruction-output pairs to follow instructions, answer questions, be waifu, etc.
 * **Single-task fine-tuning**: The pre-trained model is honed for a narrow and specific task such as toxicity detection or summarization, similar to BERT and T5.
 * **Reinforcement learning with human feedback (RLHF)**: This combines instruction fine-tuning with reinforcement learning. It requires collecting human preferences (e.g., pairwise comparisons) which are then used to train a reward model. The reward model is then used to further fine-tune the instructed LLM via RL techniques such as proximal policy optimization (PPO).

We'll mainly focus on single-task and instruction fine-tuning here.

### Why fine-tuning?

Fine-tuning an open LLM is becoming an increasingly viable alternative to using a 3rd-party, cloud-based LLM for several reasons.

**Performance & control:** Fine-tuning can improve the performance of an off-the-shelf base model, and may even surpass a 3rd-party LLM. It also provides greater control over LLM behavior, resulting in a more robust system or product. Overall, fine-tuning enables us to build products that are differentiated from simply using 3rd-party or open LLMs.

**Modularization:** Single-task fine-tuning lets us use an army of smaller models that each specialize on their own tasks. Via this setup, a system can be modularized into individual models for tasks like content moderation, extraction, summarization, etc.
Also, given that each model only has to focus on a narrow set of tasks, we can get around the alignment tax, where fine-tuning a model on one task reduces performance on other tasks.

**Reduced dependencies:** By fine-tuning and hosting our own models, we can reduce legal concerns about proprietary data (e.g., PII, internal documents and code) being exposed to external APIs. It also gets around constraints that come with 3rd-party LLMs such as rate limiting, high costs, or overly restrictive safety filters. By fine-tuning and hosting our own LLMs, we can ensure data doesn't leave our network, and can scale throughput as needed.

### More about fine-tuning

Why do we need to fine-tune a _base_ model? At the risk of oversimplifying, base models are primarily optimized to predict the next word based on the corpus they're trained on. Hence, they aren't naturally adept at following instructions or answering questions. When posed a question, they tend to respond with more questions. Thus, we perform instruction fine-tuning so they learn to respond appropriately.

However, fine-tuning isn't without its challenges. First, we **need a significant volume of demonstration data**. For instance, in the [InstructGPT paper](https://arxiv.org/abs/2203.02155), they used 13k instruction-output samples for supervised fine-tuning, 33k output comparisons for reward modeling, and 31k prompts without human labels as input for RLHF.

Furthermore, fine-tuning comes with an alignment tax: the process can lead to **lower performance on certain critical tasks**. (There's no free lunch after all.) The same InstructGPT paper found that RLHF led to performance regressions (relative to the GPT-3 base model) on public NLP tasks like SQuAD, HellaSwag, and WMT 2015 French to English. (A workaround is to have several smaller, specialized models that excel at narrow tasks.)

Fine-tuning is similar to the concept of transfer learning. As defined in Wikipedia: "Transfer learning is a technique in machine learning in which knowledge learned from a task is re-used to boost performance on a related task." Several years ago, transfer learning made it easy for me to apply ResNet models trained on ImageNet to [classify fashion products](/writing/image-categorization-is-now-live/) and [build image search](/writing/image-search-is-now-live/).

**[ULMFit](https://arxiv.org/abs/1801.06146)** is one of the earlier papers to apply transfer learning to text. They established the protocol of self-supervised pre-training (on unlabeled data) followed by fine-tuning (on labeled data). They used AWD-LSTM, an LSTM variant with dropout at various gates.

![Overview of ULMFit](/assets/ulmfit.jpg)

Overview of ULMFit ([source](https://arxiv.org/abs/1801.06146))

During pre-training (next word prediction), the model is trained on wikitext-103, which contains 28.6k Wikipedia articles and 103M words. Then, during target task fine-tuning, the LM is fine-tuned with data from the domain of the specific task.
Finally, during classifier fine-tuning, the model is augmented with two additional linear blocks and fine-tuned on the target classification tasks, which include sentiment analysis, question classification, and topic classification.

Since then, the pre-training followed by fine-tuning paradigm has driven much progress in language modeling. **[Bidirectional Encoder Representations from Transformers (BERT; encoder only)](https://arxiv.org/abs/1810.04805)** was pre-trained on masked language modeling and next sentence prediction on English Wikipedia and BooksCorpus. It was then fine-tuned on task-specific inputs and labels for single-sentence classification, sentence-pair classification, single-sentence tagging, and question answering.

![Overview of BERT](/assets/bert.jpg)

Overview of BERT ([source](https://arxiv.org/abs/1810.04805))

**[Generative Pre-trained Transformers (GPT; decoder only)](https://s3-us-west-2.amazonaws.com/openai-assets/research-covers/language-unsupervised/language_understanding_paper.pdf)** was first pre-trained on BooksCorpus via next token prediction. This was followed by single-task fine-tuning for tasks such as text classification, textual entailment, similarity, and Q&A. Interestingly, they found that including language modeling as an auxiliary objective helped the model generalize and converge faster during training.

![Overview of GPT](/assets/gpt.jpg)

Overview of GPT ([source](https://s3-us-west-2.amazonaws.com/openai-assets/research-covers/language-unsupervised/language_understanding_paper.pdf))

**[Text-to-text Transfer Transformer (T5; encoder-decoder)](https://arxiv.org/abs/1910.10683)** was pre-trained on the Colossal Clean Crawled Corpus (C4), a cleaned version of the Common Crawl from April 2019. It employed the same denoising objective as BERT, namely masked language modeling. It was then fine-tuned on tasks such as text classification, abstractive summarization, Q&A, and machine translation.

![Overview of T5](/assets/t5.jpg)

Overview of T5 ([source](https://arxiv.org/abs/1910.10683))

But unlike ULMFit, BERT, and GPT, which used different classifier heads for downstream tasks, T5 represented downstream tasks as text-to-text only. For example, a translation task would have input text starting with `translate English to German:`, while a summarization task might start with `summarize:` or `TL;DR:`. The prefix essentially became a hyperparameter (first instance of prompt engineering?). This design choice allowed them to use a single fine-tuned model across a variety of downstream tasks.

**[InstructGPT](https://arxiv.org/abs/2203.02155)** expanded this idea of single-task fine-tuning to instruction fine-tuning. The base model was GPT-3, pre-trained on internet data including Common Crawl, WebText, Books, and Wikipedia. It then applied supervised fine-tuning on demonstrations of desired behavior (instruction and output). Next, it trained a reward model on the dataset of comparisons.
Finally, it optimized the instructed model against the reward model via PPO, with this last stage focusing more on alignment than specific task performance.

![Overview of fine-tuning steps in InstructGPT](/assets/instructgpt.jpg)

Overview of fine-tuning steps in InstructGPT ([source](https://arxiv.org/abs/2203.02155))

Next, let's move from fine-tuned models to fine-tuning techniques.

**[Soft prompt tuning](https://arxiv.org/abs/2104.08691)** prepends a trainable tensor to the model's input embeddings, essentially creating a soft prompt. Unlike discrete text prompts, soft prompts can be learned via backpropagation, meaning they can be fine-tuned to incorporate signals from any number of labeled examples.

Next, there's **[prefix tuning](https://arxiv.org/abs/2101.00190)**. Instead of adding a soft prompt to the model input, it prepends trainable parameters to the hidden states of all transformer blocks. During fine-tuning, the LM's original parameters are kept frozen while the prefix parameters are updated.

![Overview of prefix-tuning](/assets/prefix.jpg)

Overview of prefix-tuning ([source](https://arxiv.org/abs/2101.00190))

The paper showed that this achieved performance comparable to full fine-tuning despite requiring updates on just 0.1% of parameters. Moreover, in settings with limited data and extrapolation to new topics, it outperformed full fine-tuning. One hypothesis is that training fewer parameters helped reduce overfitting on smaller target datasets.

There's also the **[adapter](https://arxiv.org/abs/1902.00751)** technique. This method adds fully connected network layers twice to each transformer block: after the attention layer and after the feed-forward network layer. On GLUE, it's able to achieve within 0.4% of the performance of full fine-tuning by just adding 3.6% more parameters per task.

![Overview of adapters](/assets/adapter.jpg)

Overview of adapters ([source](https://arxiv.org/abs/1902.00751))

**[Low-Rank Adaptation (LoRA)](https://arxiv.org/abs/2106.09685)** is a technique where adapters are designed to be the product of two low-rank matrices. It was inspired by [Aghajanyan et al.](https://arxiv.org/abs/2012.13255), which showed that, when adapting to a specific task, pre-trained language models have a low intrinsic dimension and can still learn efficiently despite a random projection into a smaller subspace. Thus, LoRA hypothesized that weight updates during adaptation also have low intrinsic rank.

![Overview of LoRA](/assets/lora.jpg)

Overview of LoRA ([source](https://arxiv.org/abs/2106.09685))

Similar to prefix tuning, they found that LoRA outperformed several baselines, including full fine-tuning. Again, the hypothesis is that LoRA, thanks to its reduced rank, provides implicit regularization. In contrast, full fine-tuning, which updates all weights, could be prone to overfitting.
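To make the low-rank idea concrete, here's a minimal PyTorch sketch of a LoRA-wrapped linear layer; the rank and scaling values are illustrative defaults.

```python
import torch
import torch.nn as nn

class LoRALinear(nn.Module):
    """Freeze a pre-trained linear layer W and learn a low-rank update B @ A."""

    def __init__(self, linear: nn.Linear, rank: int = 8, alpha: float = 16.0):
        super().__init__()
        self.linear = linear
        for p in self.linear.parameters():
            p.requires_grad_(False)  # pre-trained weights stay frozen
        self.A = nn.Parameter(torch.randn(rank, linear.in_features) * 0.01)
        self.B = nn.Parameter(torch.zeros(linear.out_features, rank))  # update starts at zero
        self.scale = alpha / rank

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.linear(x) + (x @ self.A.T @ self.B.T) * self.scale

layer = LoRALinear(nn.Linear(768, 768))
trainable = sum(p.numel() for p in layer.parameters() if p.requires_grad)
print(trainable)  # only the two low-rank matrices A and B are trained
```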
**[QLoRA](https://arxiv.org/abs/2305.14314)** builds on the idea of LoRA. But instead of using the full 16-bit model during fine-tuning, it applies a 4-bit quantized model. It introduced several innovations such as 4-bit NormalFloat (to quantize models), double quantization (for additional memory savings), and paged optimizers (which prevent OOM errors by transferring data to CPU RAM when the GPU runs out of memory).

![Overview of QLoRA](/assets/qlora.jpg)

Overview of QLoRA ([source](https://arxiv.org/abs/2305.14314))

As a result, QLoRA reduces the average memory requirements for fine-tuning a 65B model from > 780GB to a more manageable 48GB, without degrading runtime or predictive performance compared to a 16-bit fully fine-tuned baseline.

(Fun fact: During a meetup with Tim Dettmers, an author of QLoRA, he quipped that double quantization was "a bit of a silly idea but works perfectly." Hey, if it works, it works.)

### How to apply fine-tuning?

The first step is to **collect demonstration data/labels**. These could be for straightforward tasks such as document classification, entity extraction, or summarization, or they could be more complex such as Q&A or dialogue. Some ways to collect this data include:

 * **Via experts or crowd-sourced human annotators**: While this is expensive and slow, it usually leads to higher-quality data with [good guidelines](/writing/labeling-guidelines/).
 * **Via user feedback**: This can be as simple as asking users to select attributes that describe a product, rating LLM responses with thumbs up or down (e.g., ChatGPT), or logging which images users choose to download (e.g., Midjourney).
 * **Query larger open models with permissive licenses**: With prompt engineering, we might be able to elicit reasonable demonstration data from a larger model (Falcon 40B Instruct) that can be used to fine-tune a smaller model.
 * **Reuse open-source data**: If your task can be framed as a natural language inference (NLI) task, we could fine-tune a model to perform NLI using [MNLI data](https://cims.nyu.edu/~sbowman/multinli/). Then, we can continue fine-tuning the model on internal data to classify inputs as entailment, neutral, or contradiction.

Note: The terms of some LLMs prevent users from using their output to develop other models.

 * [OpenAI Terms of Use](https://openai.com/policies/terms-of-use) (Section 2c, iii): You may not use output from the Services to develop models that compete with OpenAI.
 * [LLaMA 2 Community License Agreement](https://ai.meta.com/llama/license/) (Section 1b-v): You will not use the Llama Materials or any output or results of the Llama Materials to improve any other large language model (excluding Llama 2 or derivative works thereof).

The next step is to **define evaluation metrics**. We've discussed this in a previous section.

Then, **select a pre-trained model.** There are [several open LLMs with permissive licenses](https://github.com/eugeneyan/open-llms) to choose from. Excluding Llama 2 (since its license isn't fully permissive for commercial use), Falcon-40B is known to be the best-performing model. Nonetheless, I've found it unwieldy to fine-tune and serve in production given how heavy it is.

Instead, I'm inclined to use smaller models like Falcon-7B.
And if we can simplify and frame the task more narrowly, BERT (340M params), RoBERTa (355M params), and BART (406M params) are solid picks for classification and natural language inference tasks. Beyond that, Flan-T5 (770M and 3B variants) is a reliable baseline for translation, abstractive summarization, headline generation, etc.

We may also need to **update the model architecture**, such as when the pre-trained model's architecture doesn't align with the task. For example, we might need to update the classification heads on BERT or T5 to match our task. Tip: If the task is a simple binary classification task, NLI models can work out of the box. Entailment is mapped to positive, contradiction is mapped to negative, while the neutral label can indicate uncertainty.

**Then, pick a fine-tuning approach.** LoRA and QLoRA are good places to start (see the sketch below). But if your fine-tuning is more intensive, such as continued pre-training on new domain knowledge, you may find full fine-tuning necessary.

**Finally, basic hyperparameter tuning.** Generally, most papers focus on learning rate, batch size, and number of epochs (see LoRA, QLoRA). And if we're using LoRA, we might want to tune the rank parameter (though the QLoRA paper found that different rank and alpha led to similar results). Other hyperparameters include input sequence length, loss type (contrastive loss vs. token match), and data ratios (like the mix of pre-training or demonstration data, or the ratio of positive to negative examples, among others).
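As a starting point, here's a sketch of setting up LoRA fine-tuning with the `peft` library; the base model, rank, and target modules are illustrative assumptions.

```python
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("tiiuae/falcon-7b")
config = LoraConfig(
    r=8,                                 # rank of the low-rank update
    lora_alpha=16,
    lora_dropout=0.05,
    target_modules=["query_key_value"],  # attention projections in Falcon
    task_type="CAUSAL_LM",
)
model = get_peft_model(model, config)
model.print_trainable_parameters()  # typically well under 1% of all weights
# From here, train with your usual loop or the transformers Trainer.
```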
## Caching: To reduce latency and cost

Caching is a technique to store data that has been previously retrieved or computed. This way, future requests for the same data can be served faster. In the space of serving LLM generations, the popularized approach is to cache the LLM response keyed on the embedding of the input request. Then, for each new request, if a semantically similar request is received, we can serve the cached response.

For some practitioners, this sounds like "[a disaster waiting to happen.](https://twitter.com/HanchungLee/status/1681146845186363392)" I'm inclined to agree. Thus, I think the key to adopting this pattern is figuring out how to cache safely, instead of solely depending on semantic similarity.

### Why caching?

Caching can significantly reduce latency for responses that have been served before. In addition, by eliminating the need to compute a response for the same input again and again, we can reduce the number of LLM requests and thus save cost. Also, there are certain use cases that do not support latency on the order of seconds. Thus, pre-computing and caching may be the only way to serve those use cases.

### More about caching

A cache is a high-speed storage layer that stores a subset of data that's accessed more frequently. This lets us serve these requests faster via the cache instead of the data's primary storage (e.g., search index, relational database). Overall, caching enables efficient reuse of previously fetched or computed data. (More about [caching](https://aws.amazon.com/caching/) and [best practices](https://aws.amazon.com/caching/best-practices/).)

An example of caching for LLMs is [GPTCache](https://github.com/zilliztech/GPTCache).

![Overview of GPTCache](/assets/gptcache.jpg)

Overview of GPTCache ([source](https://github.com/zilliztech/GPTCache))

When a new request is received:

 * Embedding generator: This embeds the request via various models such as OpenAI's `text-embedding-ada-002`, FastText, Sentence Transformers, and more.
 * Similarity evaluator: This computes the similarity of the request via the vector store and then provides a distance metric. The vector store can either be local (FAISS, Hnswlib) or cloud-based. It can also compute similarity via a model.
 * Cache storage: If the request is similar enough, the cached response is fetched and served.
 * LLM: If the request isn't similar enough, it gets passed to the LLM, which then generates the result. Finally, the response is served and cached for future use.

Redis also shared a [similar example](https://www.youtube.com/live/9VgpXcfJYvw?feature=share&t=1517), mentioning that some teams go as far as precomputing all the queries they anticipate receiving. Then, they set a similarity threshold on which queries are similar enough to warrant a cached response.
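Here's a toy sketch of that flow: serve a cached response when a new request embeds close enough to a previous one. The encoder and the 0.95 threshold are assumptions, and a real system would add the safety checks discussed below.

```python
from typing import Optional

import numpy as np

class SemanticCache:
    def __init__(self, encoder, threshold: float = 0.95):
        self.encoder, self.threshold = encoder, threshold
        self.keys = []       # request embeddings (normalized vectors)
        self.responses = []  # cached LLM responses

    def get(self, request: str) -> Optional[str]:
        if not self.keys:
            return None
        q = self.encoder.encode([request], normalize_embeddings=True)[0]
        sims = np.stack(self.keys) @ q  # cosine similarity on normalized vectors
        best = int(sims.argmax())
        return self.responses[best] if sims[best] >= self.threshold else None

    def put(self, request: str, response: str) -> None:
        self.keys.append(self.encoder.encode([request], normalize_embeddings=True)[0])
        self.responses.append(response)
```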
### How to apply caching?

**We should start with having a good understanding of user request patterns.** This allows us to design the cache thoughtfully so it can be applied reliably.

First, let's consider a non-LLM example. Imagine we're caching product prices for an e-commerce site. During checkout, is it safe to display the (possibly outdated) cached price? Probably not, since the price the customer sees during checkout should be the same as the final amount they're charged. Caching isn't appropriate here as we need to ensure consistency for the customer.

Now, bringing it back to LLM responses. Imagine we get a request for a summary of "Mission Impossible 2" that's semantically similar enough to "Mission Impossible 3". If we're looking up the cache based on semantic similarity, we could serve the wrong response.

We also need to **consider if caching is effective for the usage pattern.** One way to quantify this is via the cache hit rate (the percentage of requests served directly from the cache). If the usage pattern is uniformly random, the cache would need frequent updates. Thus, the effort to keep the cache up-to-date could negate any benefit a cache has to offer. On the other hand, if the usage follows a power law where a small proportion of unique requests accounts for the majority of traffic (e.g., search queries, product views), then caching could be an effective strategy.

Beyond semantic similarity, we could also explore caching based on:

 * **Item IDs:** This applies when we pre-compute [summaries of product reviews](https://www.cnbc.com/2023/06/12/amazon-is-using-generative-ai-to-summarize-product-reviews.html) or generate a summary for an entire movie trilogy.
 * **Pairs of Item IDs:** Such as when we generate comparisons between two movies. While this appears to be \\(O(N^2)\\), in practice, a small number of combinations drives the bulk of traffic, such as comparisons between popular movies in a series or genre.
 * **Constrained input:** Such as variables like movie genre, director, or lead actor. For example, if a user is looking for movies by a specific director, we could execute a structured query and run it through an LLM to frame the response more eloquently. Another example is [generating code based on drop-down options](https://cheatlayer.com): if the code has been verified to work, we can cache it for reliable reuse.

Also, **caching doesn't only have to occur on-the-fly.** As Redis shared, we can pre-compute LLM generations offline or asynchronously before serving them. By serving from a cache, we shift the latency from generation (typically seconds) to cache lookup (milliseconds). Pre-computing in batch can also help reduce cost relative to serving in real-time.

While the approaches listed here may not be as flexible as semantically caching on natural language inputs, I think they provide a good balance between efficiency and reliability.

## Guardrails: To ensure output quality

In the context of LLMs, guardrails validate the output of LLMs, ensuring that the output doesn't just sound good but is also syntactically correct, factual, and free from harmful content. It also includes guarding against adversarial input.

### Why guardrails?

First, they help ensure that model outputs are reliable and consistent enough to use in production. For example, we may require output to be in a specific JSON schema so that it's machine-readable, or we need generated code to be executable. Guardrails can help with such syntactic validation.

Second, they provide an additional layer of safety and maintain quality control over an LLM's output. For example, to verify if the content generated is appropriate for serving, we may want to check that the output isn't harmful, verify it for factual accuracy, or ensure coherence with the context provided.

### More about guardrails

**One approach is to control the model's responses via prompts.** For example, Anthropic shared about prompts designed to guide the model toward generating responses that are [helpful, harmless, and honest](https://arxiv.org/abs/2204.05862) (HHH). They found that Python fine-tuning with the HHH prompt led to better performance compared to fine-tuning with RLHF.

![Example of HHH prompt](/assets/hhh.jpg)

Example of HHH prompt ([source](https://arxiv.org/abs/2204.05862))

**A more common approach is to validate the output.** An example is the [Guardrails package](https://github.com/ShreyaR/guardrails). It allows users to add structural, type, and quality requirements on LLM outputs via Pydantic-style validation. And if the check fails, it can trigger corrective action such as filtering the offending output or regenerating another response.

Most of the validation logic is in [`validators.py`](https://github.com/ShreyaR/guardrails/blob/main/guardrails/validators.py). It's interesting to see how they're implemented. Broadly speaking, its validators fall into the following categories (a small sketch follows the list):

 * Single output value validation: This includes ensuring that the output (i) is one of the predefined choices, (ii) has a length within a certain range, (iii) if numeric, falls within an expected range, and (iv) is a complete sentence.
 * Syntactic checks: This includes ensuring that generated URLs are valid and reachable, and that Python and SQL code is bug-free.
 * Semantic checks: This verifies that the output is aligned with the reference document, or that the extractive summary closely matches the source document. These checks can be done via cosine similarity or fuzzy matching techniques.
 * Safety checks: This ensures that the generated output is free of inappropriate language or that the quality of translated text is high.
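In that spirit, here's a small sketch of hand-rolled single-value and syntactic checks, not the Guardrails package's own API; the allowed choices and length bounds are illustrative, and a real system would route failures to filtering or regeneration.

```python
import json

def validate_choice(output: str, choices: set) -> bool:
    """Single output value validation: output must be one of the predefined choices."""
    return output in choices

def validate_length(output: str, min_len: int = 1, max_len: int = 500) -> bool:
    """Single output value validation: length within a certain range."""
    return min_len <= len(output) <= max_len

def validate_json(output: str) -> bool:
    """Syntactic check: output parses as valid JSON."""
    try:
        json.loads(output)
        return True
    except json.JSONDecodeError:
        return False

# If any check fails, trigger corrective action: filter the output or regenerate.
checks = [
    validate_choice("toxic", {"toxic", "non-toxic"}),
    validate_json('{"label": "toxic"}'),
]
print(all(checks))
```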
Broadly speaking, its\n", + "validators fall into the following categories:\n", + "\n", + " * Single output value validation: This includes ensuring that the output (i) is one of the predefined choices, (ii) has a length within a certain range, (iii) if numeric, falls within an expected range, and (iv) is a complete sentence.\n", + " * Syntactic checks: This includes ensuring that generated URLs are valid and reachable, and that Python and SQL code is bug-free.\n", + " * Semantic checks: This verifies that the output is aligned with the reference document, or that the extractive summary closely matches the source document. These checks can be done via cosine similarity or fuzzy matching techniques.\n", + " * Safety checks: This ensures that the generated output is free of inappropriate language or that the quality of translated text is high.\n", + "\n", + "Nvidia’s [NeMo-Guardrails](https://github.com/NVIDIA/NeMo-Guardrails) follows\n", + "a similar principle but is designed to guide LLM-based conversational systems.\n", + "Rather than focusing on syntactic guardrails, it emphasizes semantic ones.\n", + "This includes ensuring that the assistant steers clear of politically charged\n", + "topics, provides factually correct information, and can detect jailbreaking\n", + "attempts.\n", + "\n", + "Thus, NeMo’s approach is somewhat different: Instead of using more\n", + "deterministic checks like verifying if a value exists in a list or inspecting\n", + "code for syntax errors, NeMo leans heavily on using another LLM to validate\n", + "outputs (inspired by [SelfCheckGPT](https://arxiv.org/abs/2303.08896)).\n", + "\n", + "In their example for fact-checking and preventing hallucination, they ask the\n", + "LLM itself to check whether the most recent output is consistent with the\n", + "given context. To fact-check, the LLM is queried if the response is true based\n", + "on the documents retrieved from the knowledge base. To prevent hallucinations,\n", + "since there isn’t a knowledge base available, they get the LLM to generate\n", + "multiple alternative completions which serve as the context. The underlying\n", + "assumption is that if the LLM produces multiple completions that disagree with\n", + "one another, the original completion is likely a hallucination.\n", + "\n", + "The moderation example follows a similar approach: The response is screened\n", + "for harmful and unethical content via an LLM. Given the nuance of ethics and\n", + "harmful content, heuristics and conventional machine learning techniques fall\n", + "short. Thus, an LLM is required for a deeper understanding of the intent and\n", + "structure of dialogue.\n", + "\n", + "Apart from using guardrails to verify the output of LLMs, we can also\n", + "**directly steer the output to adhere to a specific grammar.** An example of\n", + "this is Microsoft’s [Guidance](https://github.com/microsoft/guidance). Unlike\n", + "Guardrails which [imposes JSON schema via a\n", + "prompt](https://github.com/ShreyaR/guardrails/blob/main/guardrails/constants.xml#L14),\n", + "Guidance enforces the schema by injecting tokens that make up the structure.\n", + "\n", + "We can think of Guidance as a domain-specific language for LLM interactions\n", + "and output. 
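For example, a template interleaves literal structure with generated slots (a sketch based on Guidance’s 2023-era syntax; details may differ across versions):\n", + "\n", + "```python\n", + "import guidance\n", + "\n", + "# Assumes an LLM has been configured beforehand, e.g.:\n", + "# guidance.llm = guidance.llms.OpenAI('text-davinci-003')\n", + "\n", + "# The labels are literal text we control; the LLM only fills the {{gen ...}}\n", + "# slot, and {{#select}} constrains it to one of the fixed options.\n", + "program = guidance('''Tweet: {{gen 'tweet' max_tokens=30}}\n", + "Sentiment: {{#select 'sentiment'}}positive{{or}}negative{{/select}}''')\n", + "```\n", + "\n", + "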
It draws inspiration from [Handlebars](https://handlebarsjs.com),\n", + "a popular templating language used in web applications that empowers users to\n", + "perform variable interpolation and logical control.\n", + "\n", + "However, Guidance sets itself apart from regular templating languages by\n", + "executing linearly. This means it maintains the order of tokens generated.\n", + "Thus, by inserting tokens that are part of the structure—instead of relying on\n", + "the LLM to generate them correctly—Guidance can dictate the specific output\n", + "format. In their examples, they show how to\n", + "[generate JSON that’s always valid](https://github.com/microsoft/guidance#guaranteeing-valid-syntax-json-example-notebook),\n", + "[generate complex output formats](https://github.com/microsoft/guidance#rich-output-structure-example-notebook)\n", + "with multiple keys, ensure that LLMs\n", + "[play the right roles](https://github.com/microsoft/guidance#role-based-chat-model-example-notebook),\n", + "and have [agents interact with each other](https://github.com/microsoft/guidance#agents-notebook).\n", + "\n", + "They also introduced a concept called\n", + "[token healing](https://github.com/microsoft/guidance#token-healing-notebook),\n", + "a useful feature that helps avoid subtle bugs that occur due to tokenization.\n", + "In simple terms, it rewinds the generation by one token before the end of the\n", + "prompt and then restricts the first generated token to have a prefix matching\n", + "the last token in the prompt. This eliminates the need to fret about token\n", + "boundaries when crafting prompts.\n", + "\n", + "### How to apply guardrails?\n", + "\n", + "Though the concept of guardrails for LLMs in industry is still nascent, there\n", + "are a handful of immediately useful and practical strategies we can consider.\n", + "\n", + "**Structural guidance:** Apply guidance whenever possible. It provides direct\n", + "control over outputs and offers a more precise method to ensure that output\n", + "conforms to a specific structure or format.\n", + "\n", + "**Syntactic guardrails:** These include checking if categorical output is\n", + "within a set of acceptable choices, or if numeric output is within an expected\n", + "range. Also, if we generate SQL, these can verify that it’s free from syntax\n", + "errors and ensure that all columns in the query match the schema. Ditto for\n", + "generating code (e.g., Python, JavaScript).\n", + "\n", + "**Content safety guardrails:** These verify that the output has no harmful or\n", + "inappropriate content. It can be as simple as checking against the\n", + "[List of Dirty, Naughty, Obscene, and Otherwise Bad Words](https://github.com/LDNOOBW/List-of-Dirty-Naughty-Obscene-and-Otherwise-Bad-Words)\n", + "or using [profanity detection](https://pypi.org/project/profanity-check/)\n", + "models. (It’s [common to run moderation classifiers on\n", + "output](https://twitter.com/goodside/status/1685023251532320768).) More\n", + "complex and nuanced output can rely on an LLM evaluator.\n", + "\n", + "**Semantic/factuality guardrails:** These confirm that the output is\n", + "semantically relevant to the input. Say we’re generating a two-sentence\n", + "summary of a movie based on its synopsis. We can validate if the produced\n", + "summary is semantically similar to the input, or have (another) LLM ascertain\n", + "if the summary accurately represents the provided synopsis.\n", + "\n", + "**Input guardrails:** These limit the types of input the model will respond\n", + "to, helping to mitigate the risk of the model responding to inappropriate or\n", + "adversarial prompts which would lead to generating harmful content. For\n", + "example, you’ll get an error if you ask Midjourney to generate NSFW content.\n", + "This can be as straightforward as comparing against a list of strings or using\n", + "a moderation classifier.\n", + "\n", + "![An example of an input guardrail on Midjourney](/assets/input-guardrail.jpg)\n", + "\n", + "An example of an input guardrail on Midjourney\n", + "\n", + "## Defensive UX: To anticipate & handle errors gracefully\n", + "\n", + "Defensive UX is a design strategy that acknowledges that bad things, such as\n", + "inaccuracies or hallucinations, can happen during user interactions with\n", + "machine learning or LLM-based products. Thus, the intent is to anticipate and\n", + "manage these in advance, primarily by guiding user behavior, averting misuse,\n", + "and handling errors gracefully.\n", + "\n", + "### Why defensive UX?\n", + "\n", + "Machine learning and LLMs aren’t perfect—they can produce inaccurate output.\n", + "Also, they respond differently to the same input over time, such as search\n", + "engines displaying varying results due to personalization, or LLMs generating\n", + "diverse output on more creative, higher-temperature settings. This can\n", + "violate the principle of consistency, which advocates for a consistent UI and\n", + "predictable behaviors.\n", + "\n", + "Defensive UX can help mitigate the above by providing:\n", + "\n", + " * **Increased accessibility**: By helping users understand how ML/LLM features work and their limitations, defensive UX makes it more accessible and user-friendly.\n", + " * **Increased trust**: When users see that the feature can handle difficult scenarios gracefully and doesn’t produce harmful output, they’re likely to trust it more.\n", + " * **Better UX**: By designing the system and UX to handle ambiguous situations and errors, defensive UX paves the way for a smoother, more enjoyable user experience.\n", + "\n", + "### More about defensive UX\n", + "\n", + "To learn more about defensive UX, we can look at Human-AI guidelines from\n", + "Microsoft, Google, and Apple.\n", + "\n", + "**Microsoft’s [Guidelines for Human-AI Interaction](https://www.microsoft.com/en-us/research/publication/guidelines-for-human-ai-interaction/)**\n", + "is based on a survey of 168 potential guidelines.\n", + "These were collected from internal and external industry sources, academic\n", + "literature, and public articles. After combining guidelines that were similar,\n", + "filtering guidelines that were too vague, too specific, or not AI-specific,\n", + "and a round of heuristic evaluation, they narrowed it down to 18 guidelines.\n", + "\n", + "![Guidelines for Human-AI interaction across the user journey](/assets/ms-guidelines.jpg)\n", + "\n", + "Guidelines for Human-AI interaction across the user journey\n", + "([source](https://www.microsoft.com/en-us/research/project/guidelines-for-human-ai-interaction/))\n", + "\n", + "These guidelines follow a certain style: Each one is a succinct action rule of\n", + "3–10 words, beginning with a verb. 
Each rule is accompanied by a one-liner\n", + "that addresses potential ambiguities. They are organized based on their likely\n", + "application during user interaction:\n", + "\n", + " * Initially: Make clear what the system can do (G1), make clear how well the system can do what it can do (G2)\n", + " * During interaction: Time services based on context (G3), mitigate social biases (G6)\n", + " * When wrong: Support efficient dismissal (G8), support efficient correction (G9)\n", + " * Over time: Learn from user behavior (G13), provide global controls (G17)\n", + "\n", + "**Google’s [People + AI Guidebook](https://pair.withgoogle.com/guidebook/)** is\n", + "rooted in data and insights drawn from Google’s product team and academic\n", + "research. In contrast to Microsoft’s guidelines, which are organized around the\n", + "user, Google organizes its guidelines into concepts that a developer needs to\n", + "keep in mind.\n", + "\n", + "There are 23 patterns grouped around common questions that come up during the\n", + "product development process, including:\n", + "\n", + " * How do I get started with human-centered AI: Determine if the AI adds value, invest early in good data practices (e.g., evals)\n", + " * How do I onboard users to new AI features: Make it safe to explore, anchor on familiarity, automate in phases\n", + " * How do I help users build trust in my product: Set the right expectations, be transparent, automate more when the risk is low\n", + "\n", + "**Apple’s [Human Interface Guidelines for Machine Learning](https://developer.apple.com/design/human-interface-guidelines/machine-learning)**\n", + "differs from the bottom-up approach of academic\n", + "literature and user studies. Instead, its primary source is practitioner\n", + "knowledge and experience. Thus, it doesn’t include many references or data\n", + "points, but instead focuses on Apple’s longstanding design principles. This\n", + "results in a unique perspective that distinguishes it from the other two\n", + "guidelines.\n", + "\n", + "The document focuses on how Apple’s design principles can be applied to\n", + "ML-infused products, emphasizing aspects of UI rather than model functionality.\n", + "It starts by asking developers to consider the role of ML in their app and\n", + "work backwards from the user experience. This includes questions such as\n", + "whether ML is:\n", + "\n", + " * Critical or complementary: For example, Face ID cannot work without ML but the keyboard can still work without QuickType.\n", + " * Proactive or reactive: Siri Suggestions are proactive while autocorrect is reactive.\n", + " * Dynamic or static: Recommendations are dynamic while object detection in Photos only improves with each iOS release.\n", + "\n", + "It then delves into several patterns, split into inputs and outputs of a\n", + "system. Inputs focus on explicit feedback, implicit feedback, calibration, and\n", + "corrections. This section guides the design for how AI products request and\n", + "process user data and interactions. Outputs focus on mistakes, multiple\n", + "options, confidence, attribution, and limitations. The intent is to ensure the\n", + "model’s output is presented in a comprehensible and useful manner.\n", + "\n", + "The differences between the three guidelines are insightful. Google has more\n", + "emphasis on considerations for training data and model development, likely due\n", + "to its engineering-driven culture. 
Microsoft has more focus on mental models,\n", + "likely an artifact of the HCI academic study. Lastly, Apple’s approach centers\n", + "around providing a seamless UX, a focus likely influenced by its cultural\n", + "values and principles.\n", + "\n", + "### How to apply defensive UX?\n", + "\n", + "Here are some patterns based on the guidelines above. (Disclaimer: I’m not a\n", + "designer.)\n", + "\n", + "**Set the right expectations.** This principle is consistent across all three\n", + "guidelines:\n", + "\n", + " * Microsoft: Make clear how well the system can do what it can do (help the user understand how often the AI system may make mistakes)\n", + " * Google: Set the right expectations (be transparent with your users about what your AI-powered product can and cannot do)\n", + " * Apple: Help people establish realistic expectations (describe the limitation in marketing material or within the feature’s context)\n", + "\n", + "This can be as simple as adding a brief disclaimer above AI-generated results,\n", + "like those of Bard, or highlighting our app’s limitations on its landing page,\n", + "like how ChatGPT does it.\n", + "\n", + "![Example of a disclaimer on Google Bard results \\(Note: The code provided\n", + "will not work.\\)](/assets/bard-disclaimer.png)\n", + "\n", + "Example of a disclaimer on Google Bard results (Note: `nrows` is not a valid\n", + "argument.)\n", + "\n", + "By being transparent about our product’s capabilities and limitations, we help\n", + "users calibrate their expectations about its functionality and output. While\n", + "this may cause users to trust it less in the short run, it helps foster trust\n", + "in the long run—users are less likely to overestimate our product and\n", + "subsequently face disappointment.\n", + "\n", + "**Enable efficient dismissal.** This is explicitly mentioned as Microsoft’s\n", + "Guideline 8: Support efficient dismissal (make it easy to dismiss or ignore\n", + "undesired AI system services).\n", + "\n", + "For example, if a user is navigating our site and a chatbot pops up asking if\n", + "they need help, it should be easy for the user to dismiss the chatbot. This\n", + "ensures the chatbot doesn’t get in the way, especially on devices with smaller\n", + "screens. Similarly, GitHub Copilot allows users to conveniently ignore its\n", + "code suggestions by simply continuing to type. While this may reduce usage of\n", + "the AI feature in the short term, it prevents it from becoming a nuisance and\n", + "potentially reducing customer satisfaction in the long term.\n", + "\n", + "**Provide attribution.** This is listed in all three guidelines:\n", + "\n", + " * Microsoft: Make clear why the system did what it did (enable the user to access an explanation of why the AI system behaved as it did)\n", + " * Google: Add context from human sources (help users appraise your recommendations with input from 3rd-party sources)\n", + " * Apple: Consider using attributions to help people distinguish among results\n", + "\n", + "Citations are becoming an increasingly common design element. Take BingChat\n", + "for example. When we make a query, it includes citations, usually from\n", + "reputable sources, in its responses. This not only shows where the information\n", + "came from, but also allows users to assess the quality of the sources.\n", + "Similarly, imagine we’re using an LLM to explain why a user might like a\n", + "product. 
Alongside the LLM-generated explanation, we could include a quote\n", + "from an actual review or mention the product rating.\n", + "\n", + "Context from experts and the community also enhances user trust. For example,\n", + "if a user is seeking recommendations for a hiking trail, mentioning that a\n", + "suggested trail comes highly recommended by the relevant community can go a\n", + "long way. It not only adds value to the recommendation but also helps users\n", + "calibrate trust through the human connection.\n", + "\n", + "![Example of attribution via social proof](/assets/social-proof.jpg)\n", + "\n", + "Example of attribution via social proof\n", + "([source](https://pair.withgoogle.com/guidebook/patterns))\n", + "\n", + "Finally, Apple’s guidelines include popular attributions such as “Because\n", + "you’ve read non-fiction”, “New books by authors you’ve read”. These\n", + "descriptors not only personalize the experience but also provide context,\n", + "enhancing user understanding and trust.\n", + "\n", + "**Anchor on familiarity.** When introducing users to a new AI product or\n", + "feature, it helps to guide them with familiar UX patterns and features. This\n", + "makes it easier for users to focus on the main task and start to earn customer\n", + "trust in our new product. Resist the temptation to showcase new and “magical”\n", + "features via exotic UI elements.\n", + "\n", + "Along a similar vein, chat-based features are becoming more common due to\n", + "ChatGPT’s growing popularity. For example, chat with your docs, chat to query\n", + "your data, chat to buy groceries. However, I [question whether chat is the\n", + "right UX](/writing/llm-ux/) for most user experiences—it just takes too much\n", + "effort relative to the familiar UX of clicking on text and images.\n", + "\n", + "Furthermore, increasing user effort leads to higher expectations that are\n", + "harder to meet. Netflix shared that users have [higher expectations for\n", + "recommendations](https://slideslive.com/38934788/a-human-perspective-on-\n", + "algorithmic-similarity?ref=folder-59726) that result from explicit actions\n", + "such as search. In general, the more effort a user puts in (e.g., chat,\n", + "search), the higher the expectations they have. Contrast this with lower-\n", + "effort interactions such as scrolling over recommendations slates or clicking\n", + "on a product.\n", + "\n", + "Thus, while chat offers more flexibility, it also demands more user effort.\n", + "Moreover, using a chat box is less intuitive as it lacks signifiers on how\n", + "users can adjust the output. Overall, I think that sticking with a familiar\n", + "and constrained UI makes it easier for users to navigate our product; chat\n", + "should only be considered as a secondary or tertiary option.\n", + "\n", + "## Collect user feedback: To build our data flywheel\n", + "\n", + "Gathering user feedback allows us to learn their preferences. Specific to LLM\n", + "products, user feedback contributes to building evals, fine-tuning, and\n", + "guardrails. If we think about it, data—such as corpus for pre-training,\n", + "expert-crafted demonstrations, human preferences for reward modeling—is one of\n", + "the few moats for LLM products. Thus, we want to be deliberately thinking\n", + "about collecting user feedback when designing our UX.\n", + "\n", + "Feedback can be explicit or implicit. 
Explicit feedback is information users\n", + "provide in response to a request by our product; implicit feedback is\n", + "information we learn from user interactions without needing users to\n", + "deliberately provide feedback.\n", + "\n", + "### Why collect user feedback\n", + "\n", + "User feedback **helps our models improve**. By learning what users like,\n", + "dislike, or complain about, we can improve our models to better meet their\n", + "needs. It also allows us to **adapt to individual preferences**.\n", + "Recommendation systems are a prime example. As users interact with items, we\n", + "learn what they like and dislike and better cater to their tastes over time.\n", + "\n", + "In addition, the feedback loop helps us **evaluate our system’s overall\n", + "performance**. While evals can help us measure model/system performance, user\n", + "feedback offers a concrete measure of user satisfaction and product\n", + "effectiveness.\n", + "\n", + "### How to collect user feedback\n", + "\n", + "**Make it easy for users to provide feedback.** This is echoed across all\n", + "three guidelines:\n", + "\n", + " * Microsoft: Encourage granular feedback (enable the user to provide feedback indicating their preferences during regular interaction with the AI system)\n", + " * Google: Let users give feedback (give users the opportunity for real-time teaching, feedback, and error correction)\n", + " * Apple: Provide actionable information your app can use to improve the content and experience it presents to people\n", + "\n", + "ChatGPT is one such example. Users can indicate thumbs up/down on responses,\n", + "or choose to regenerate a response if it’s really bad or unhelpful. This is\n", + "useful feedback on human preferences which can then be used to fine-tune LLMs.\n", + "\n", + "Midjourney is another good example. After images are generated, users can\n", + "generate a new set of images (negative feedback), tweak an image by asking for\n", + "a variation (positive feedback), or upscale and download the image (strong\n", + "positive feedback). This enables Midjourney to gather rich comparison data on\n", + "the outputs generated.\n", + "\n", + "![>Example of collecting user feedback as part of the\n", + "UX](/assets/midjourney.jpg)\n", + "\n", + "Example of collecting user feedback as part of the UX\n", + "\n", + "**Consider implicit feedback too.** Implicit feedback is information that\n", + "arises as users interact with our product. Unlike the specific responses we\n", + "get from explicit feedback, implicit feedback can provide a wide range of data\n", + "on user behavior and preferences.\n", + "\n", + "Copilot-like assistants are a prime example. Users indicate whether a\n", + "suggestion was helpful by either wholly accepting it (strong positive\n", + "feedback), accepting and making minor tweaks (positive feedback), or ignoring\n", + "it (neutral/negative feedback). Alternatively, they may update the comment\n", + "that led to the generated code, suggesting that the initial code generation\n", + "didn’t meet their needs.\n", + "\n", + "Chatbots, such as ChatGPT and BingChat, are another example. How has daily\n", + "usage changed over time? If the product is sticky, it suggests that users like\n", + "it. Also, how long is the average conversation? This can be tricky to\n", + "interpret: Is a longer conversation better because the conversation was\n", + "engaging and fruitful? 
Or is it worse because it took the user longer to get\n", + "what they needed?\n", + "\n", + "## Other patterns common in machine learning\n", + "\n", + "Apart from the seven patterns above, there are other patterns in machine\n", + "learning that are also relevant to LLM systems and products. They include:\n", + "\n", + " * [Data flywheel](/writing/more-patterns/#data-flywheel-to-continuously-improve--build-a-moat): Continuous data collection improves the model and leads to a better user experience. This, in turn, promotes more usage which provides more data to further evaluate and fine-tune models, creating a virtuous cycle.\n", + " * [Cascade](/writing/more-patterns/#cascade-to-split-a-problem-into-smaller-problems): Rather than assigning a single, complex task to the LLM, we can simplify and break it down so it only has to handle tasks it excels at, such as reasoning or communicating eloquently. RAG is an example of this. Instead of relying on the LLM to retrieve and rank items based on its internal knowledge, we can augment LLMs with external knowledge and focus on applying the LLM’s reasoning abilities.\n", + " * [Monitoring](/writing/practical-guide-to-maintaining-machine-learning/#monitor-models-for-misbehaviour-when-retraining): This helps demonstrate the value added by the AI system, or the lack of it. Someone shared an anecdote of running an LLM-based customer support solution in prod for two weeks before discontinuing it—an A/B test showed that losses were 12x more when using an LLM as a substitute for their support team!\n", + "\n", + "(Read more about design patterns for [machine learning code](/writing/design-\n", + "patterns/) and [systems](/writing/more-patterns/).)\n", + "\n", + "Also, here’s what others said:\n", + "\n", + "> Separation of concerns/task decomposition- having distinct prompts for\n", + "> distinct subtasks and chaining them together helps w attention and\n", + "> reliability (hurts latency). We were having trouble specifying a rigid\n", + "> output structure AND variable response content so we split up the tasks —\n", + "> [Erick Enriquez](https://twitter.com/generick_ez/status/1681153738822516736)\n", + "\n", + "> A few others that will be needed: role based access control: who can access\n", + "> what; security: if I’m using a DB with an LLM, how do I ensure that I have\n", + "> the right security guards —\n", + "> [Krishna](https://twitter.com/ntkris/status/16812092400299991050)\n", + "\n", + "> Consistent output format: setting outputs to a standardized format such as\n", + "> JSON; Tool augmentation: offload tasks to more specialised, proven, reliable\n", + "> models — [Paul Tune](https://twitter.com/ptuls/status/1681284873741561857)\n", + "\n", + "> Security: mitigate cache poisoning, input validation, mitigate prompt\n", + "> injection, training data provenance, output with non-vulnerable code,\n", + "> mitigate malicious input aimed at influencing requests used by tools (AI\n", + "> Agent), mitigate denial of service (stress test llm), to name a few :) —\n", + "> [Anderson\n", + "> Darario](https://www.linkedin.com/feed/update/urn:li:activity:7087089908229558272?commentUrn=urn%3Ali%3Acomment%3A%28activity%3A7087089908229558272%2C7087224131292684288%29)\n", + "\n", + "> Another ux/ui related: incentivize users to provide feedback on generated\n", + "> answers (implicit or explicit). Implicit could be sth like copilot’s ghost\n", + "> text style, if accepted with TAB, meaning positive feedback etc. 
— [Wen\n", + "> Yang](https://www.linkedin.com/feed/update/urn:li:activity:7087089908229558272?commentUrn=urn%3Ali%3Acomment%3A%28activity%3A7087089908229558272%2C7087149792660750336%29)\n", + "\n", + "> Great list. I would add consistency checks like self-consistency sampling,\n", + "> chaining and decomposition of tasks, and the emsembling of multiple model\n", + "> outputs. Applying each of these almost daily. [Dan\n", + "> White](https://www.threads.net/@dwhitena/post/Cu3BBaJtoyj/?igshid=OGQ5ZDc2ODk2ZA==)\n", + "\n", + "> Guardrails is super relevant for building analytics tools where llm is a\n", + "> translator from natural to programming language —\n", + "> [m_voitko](https://www.threads.net/@m_voitko/post/Cu1b4liNwCS/?igshid=OGQ5ZDc2ODk2ZA==)\n", + "\n", + "## Conclusion\n", + "\n", + "This is the longest post I’ve written by far. If you’re still with me, thank\n", + "you! I hope you found reading about these patterns helpful, and that the 2x2\n", + "below makes sense.\n", + "\n", + "![LLM patterns across the axis of data to user, and defensive to\n", + "offensive.](/assets/llm-patterns.png)\n", + "\n", + "LLM patterns across the axis of data to user, and defensive to offensive.\n", + "\n", + "We’re still so early on the journey towards building LLM-based systems and\n", + "products. Are there any other key patterns or resources? What have you found\n", + "useful or not useful? I’d love to hear your experience. **Please[reach\n", + "out!](https://twitter.com/eugeneyan)**\n", + "\n", + "## References\n", + "\n", + "Hendrycks, Dan, et al. [“Measuring massive multitask language\n", + "understanding.”](https://arxiv.org/abs/2009.03300) arXiv preprint\n", + "arXiv:2009.03300 (2020).\n", + "\n", + "Gao, Leo, et al. [“A Framework for Few-Shot Language Model\n", + "Evaluation.”](https://github.com/EleutherAI/lm-evaluation-harness) v0.0.1,\n", + "Zenodo, (2021), doi:10.5281/zenodo.5371628.\n", + "\n", + "Liang, Percy, et al. [“Holistic evaluation of language\n", + "models.”](https://arxiv.org/abs/2211.09110) arXiv preprint arXiv:2211.09110\n", + "(2022).\n", + "\n", + "Dubois, Yann, et al. [“AlpacaFarm: A Simulation Framework for Methods That\n", + "Learn from Human Feedback.”](https://github.com/tatsu-lab/alpaca_eval) (2023)\n", + "\n", + "Papineni, Kishore, et al. [“Bleu: a method for automatic evaluation of machine\n", + "translation.”](https://dl.acm.org/doi/10.3115/1073083.1073135) Proceedings of\n", + "the 40th annual meeting of the Association for Computational Linguistics.\n", + "2002.\n", + "\n", + "Lin, Chin-Yew. [“Rouge: A package for automatic evaluation of\n", + "summaries.”](https://aclanthology.org/W04-1013/) Text summarization branches\n", + "out. 2004.\n", + "\n", + "Zhang, Tianyi, et al. [“Bertscore: Evaluating text generation with\n", + "bert.”](https://arxiv.org/abs/1904.09675) arXiv preprint arXiv:1904.09675\n", + "(2019).\n", + "\n", + "Zhao, Wei, et al. [“MoverScore: Text generation evaluating with contextualized\n", + "embeddings and earth mover distance.”](https://arxiv.org/abs/1909.02622) arXiv\n", + "preprint arXiv:1909.02622 (2019).\n", + "\n", + "Sai, Ananya B., Akash Kumar Mohankumar, and Mitesh M. Khapra. [“A survey of\n", + "evaluation metrics used for NLG systems.”](https://arxiv.org/abs/2008.12009)\n", + "ACM Computing Surveys (CSUR) 55.2 (2022): 1-39.\n", + "\n", + "Grusky, Max. 
[“Rogue Scores.”](https://aclanthology.org/2023.acl-long.107/)\n", + "Proceedings of the 61st Annual Meeting of the Association for Computational\n", + "Linguistics (Volume 1: Long Papers). 2023.\n", + "\n", + "Liu, Yang, et al. [“Gpteval: Nlg evaluation using gpt-4 with better human\n", + "alignment.”](https://arxiv.org/abs/2303.16634) arXiv preprint arXiv:2303.16634\n", + "(2023).\n", + "\n", + "Fourrier, Clémentine, et al. [“What’s going on with the Open LLM\n", + "Leaderboard?”](https://huggingface.co/blog/evaluating-mmlu-leaderboard#whats-\n", + "going-on-with-the-open-llm-leaderboard) (2023).\n", + "\n", + "Zheng, Lianmin, et al. [“Judging LLM-as-a-judge with MT-Bench and Chatbot\n", + "Arena.”](https://arxiv.org/abs/2306.05685) arXiv preprint arXiv:2306.05685\n", + "(2023).\n", + "\n", + "Dettmers, Tim, et al. [“Qlora: Efficient finetuning of quantized\n", + "llms.”](https://arxiv.org/abs/2305.14314) arXiv preprint arXiv:2305.14314\n", + "(2023).\n", + "\n", + "Swyx et al. [MPT-7B and The Beginning of\n", + "Context=Infinity](https://www.latent.space/p/mosaic-mpt-7b#details) (2023).\n", + "\n", + "Fradin, Michelle, Reeder, Lauren [“The New Language Model\n", + "Stack”](https://www.sequoiacap.com/article/llm-stack-perspective/) (2023).\n", + "\n", + "Radford, Alec, et al. [“Learning transferable visual models from natural\n", + "language supervision.”](https://arxiv.org/abs/2103.00020) International\n", + "conference on machine learning. PMLR, 2021.\n", + "\n", + "Yan, Ziyou. [“Search: Query Matching via Lexical, Graph, and Embedding\n", + "Methods.”](https://eugeneyan.com/writing/search-query-matching/)\n", + "eugeneyan.com, (2021).\n", + "\n", + "Petroni, Fabio, et al. [“How context affects language models’ factual\n", + "predictions.”](https://arxiv.org/abs/2005.04611) arXiv preprint\n", + "arXiv:2005.04611 (2020).\n", + "\n", + "Karpukhin, Vladimir, et al. [“Dense passage retrieval for open-domain question\n", + "answering.”](https://arxiv.org/abs/2004.04906) arXiv preprint arXiv:2004.04906\n", + "(2020).\n", + "\n", + "Lewis, Patrick, et al. [“Retrieval-augmented generation for knowledge-\n", + "intensive nlp tasks.”](https://arxiv.org/abs/2005.11401) Advances in Neural\n", + "Information Processing Systems 33 (2020): 9459-9474.\n", + "\n", + "Izacard, Gautier, and Edouard Grave. [“Leveraging passage retrieval with\n", + "generative models for open domain question\n", + "answering.”](https://arxiv.org/abs/2007.01282) arXiv preprint arXiv:2007.01282\n", + "(2020).\n", + "\n", + "Borgeaud, Sebastian, et al. [“Improving language models by retrieving from\n", + "trillions of tokens.”](https://arxiv.org/abs/2112.04426) International\n", + "conference on machine learning. PMLR, (2022).\n", + "\n", + "Lazaridou, Angeliki, et al. [“Internet-augmented language models through few-\n", + "shot prompting for open-domain question\n", + "answering.”](https://arxiv.org/abs/2203.05115) arXiv preprint arXiv:2203.05115\n", + "(2022).\n", + "\n", + "Wang, Yue, et al. [“Codet5+: Open code large language models for code\n", + "understanding and generation.”](https://arxiv.org/abs/2305.07922) arXiv\n", + "preprint arXiv:2305.07922 (2023).\n", + "\n", + "Gao, Luyu, et al. [“Precise zero-shot dense retrieval without relevance\n", + "labels.”](https://arxiv.org/abs/2212.10496) arXiv preprint arXiv:2212.10496\n", + "(2022).\n", + "\n", + "Yan, Ziyou. 
[“Obsidian-Copilot: An Assistant for Writing &\n", + "Reflecting.”](https://eugeneyan.com/writing/obsidian-copilot/) eugeneyan.com,\n", + "(2023).\n", + "\n", + "Bojanowski, Piotr, et al. [“Enriching word vectors with subword\n", + "information.”](https://arxiv.org/abs/1607.04606) Transactions of the\n", + "association for computational linguistics 5 (2017): 135-146.\n", + "\n", + "Reimers, Nils, and Iryna Gurevych. [“Making Monolingual Sentence Embeddings\n", + "Multilingual Using Knowledge Distillation.”](https://arxiv.org/abs/2004.09813)\n", + "Proceedings of the 2020 Conference on Empirical Methods in Natural Language\n", + "Processing, Association for Computational Linguistics, (2020).\n", + "\n", + "Wang, Liang, et al. [“Text embeddings by weakly-supervised contrastive pre-\n", + "training.”](https://arxiv.org/abs/2212.03533) arXiv preprint arXiv:2212.03533\n", + "(2022).\n", + "\n", + "Su, Hongjin, et al. [“One embedder, any task: Instruction-finetuned text\n", + "embeddings.”](https://arxiv.org/abs/2212.09741) arXiv preprint\n", + "arXiv:2212.09741 (2022).\n", + "\n", + "Johnson, Jeff, et al. [“Billion-Scale Similarity Search with\n", + "GPUs.”](https://arxiv.org/abs/1702.08734) IEEE Transactions on Big Data, vol.\n", + "7, no. 3, IEEE, 2019, pp. 535–47.\n", + "\n", + "Malkov, Yu A., and Dmitry A. Yashunin. [“Efficient and Robust Approximate\n", + "Nearest Neighbor Search Using Hierarchical Navigable Small World\n", + "Graphs.”](https://arxiv.org/abs/1603.09320) IEEE Transactions on Pattern\n", + "Analysis and Machine Intelligence, vol. 42, no. 4, IEEE, 2018, pp. 824–36.\n", + "\n", + "Guo, Ruiqi, et al. [“Accelerating Large-Scale Inference with Anisotropic\n", + "Vector Quantization.”](https://arxiv.org/abs/1908.10396.) International\n", + "Conference on Machine Learning, (2020)\n", + "\n", + "Ouyang, Long, et al. [“Training language models to follow instructions with\n", + "human feedback.”](https://arxiv.org/abs/2203.02155) Advances in Neural\n", + "Information Processing Systems 35 (2022): 27730-27744.\n", + "\n", + "Howard, Jeremy, and Sebastian Ruder. [“Universal language model fine-tuning\n", + "for text classification.”](https://arxiv.org/abs/1801.06146) arXiv preprint\n", + "arXiv:1801.06146 (2018).\n", + "\n", + "Devlin, Jacob, et al. [“Bert: Pre-training of deep bidirectional transformers\n", + "for language understanding.”](https://arxiv.org/abs/1810.04805) arXiv preprint\n", + "arXiv:1810.04805 (2018).\n", + "\n", + "Radford, Alec, et al. [“Improving language understanding with unsupervised\n", + "learning.”](https://openai.com/research/language-unsupervised) (2018).\n", + "\n", + "Raffel, Colin, et al. [“Exploring the limits of transfer learning with a\n", + "unified text-to-text transformer.”](https://arxiv.org/abs/1910.10683) The\n", + "Journal of Machine Learning Research 21.1 (2020): 5485-5551.\n", + "\n", + "Lester, Brian, Rami Al-Rfou, and Noah Constant. [“The power of scale for\n", + "parameter-efficient prompt tuning.”](https://arxiv.org/abs/2104.08691) arXiv\n", + "preprint arXiv:2104.08691 (2021).\n", + "\n", + "Li, Xiang Lisa, and Percy Liang. [“Prefix-tuning: Optimizing continuous\n", + "prompts for generation.”](https://arxiv.org/abs/2101.00190) arXiv preprint\n", + "arXiv:2101.00190 (2021).\n", + "\n", + "Houlsby, Neil, et al. [“Parameter-efficient transfer learning for\n", + "NLP.”](https://arxiv.org/abs/1902.00751) International Conference on Machine\n", + "Learning. PMLR, 2019.\n", + "\n", + "Hu, Edward J., et al. 
[“Lora: Low-rank adaptation of large language\n", + "models.”](https://arxiv.org/abs/2106.09685) arXiv preprint arXiv:2106.09685\n", + "(2021).\n", + "\n", + "Dettmers, Tim, et al. [“Qlora: Efficient finetuning of quantized\n", + "llms.”](https://arxiv.org/abs/2305.14314) arXiv preprint arXiv:2305.14314\n", + "(2023).\n", + "\n", + "Williams, Adina, et al. [“A Broad-Coverage Challenge Corpus for Sentence\n", + "Understanding through Inference.”](https://cims.nyu.edu/~sbowman/multinli/)\n", + "Proceedings of the 2018 Conference of the North American Chapter of the\n", + "Association for Computational Linguistics: Human Language Technologies, Volume\n", + "1 (Long Papers), Association for Computational Linguistics, (2018).\n", + "\n", + "[GPTCache](https://github.com/zilliztech/GPTCache) (2023).\n", + "\n", + "Bai, Yuntao, et al. [“Training a helpful and harmless assistant with\n", + "reinforcement learning from human\n", + "feedback.”](https://arxiv.org/abs/2204.05862) arXiv preprint arXiv:2204.05862\n", + "(2022).\n", + "\n", + "[Guardrails](https://github.com/ShreyaR/guardrails) (2023)\n", + "\n", + "[NeMo-Guardrails](https://github.com/NVIDIA/NeMo-Guardrails) (2023)\n", + "\n", + "Manakul, Potsawee, Adian Liusie, and Mark JF Gales. [“Selfcheckgpt: Zero-\n", + "resource black-box hallucination detection for generative large language\n", + "models.”](https://arxiv.org/abs/2303.08896) arXiv preprint arXiv:2303.08896\n", + "(2023).\n", + "\n", + "[Guidance](https://github.com/microsoft/guidance) (2023).\n", + "\n", + "Amershi, Saleema, et al. [“Guidelines for human-AI\n", + "interaction.”](https://www.microsoft.com/en-\n", + "us/research/publication/guidelines-for-human-ai-interaction/) Proceedings of\n", + "the 2019 chi conference on human factors in computing systems. 2019.\n", + "\n", + "[People + AI Guidebook](https://pair.withgoogle.com/guidebook/) (2023).\n", + "\n", + "[Human Interface Guidelines for Machine\n", + "Learning](https://developer.apple.com/design/human-interface-\n", + "guidelines/machine-learning) (2023).\n", + "\n", + "Schendel, Zachary A., Faraz Farzin, and Siddhi Sundar. [“A Human Perspective\n", + "on Algorithmic Similarity.”](https://slideslive.com/38934788/a-human-\n", + "perspective-on-algorithmic-similarity?ref=folder-59726) Proceedings of the\n", + "14th ACM Conference on Recommender Systems. 2020.\n", + "\n", + " \n", + "\n", + "If you found this useful, please cite this write-up as:\n", + "\n", + "> Yan, Ziyou. (Jul 2023). Patterns for Building LLM-based Systems & Products.\n", + "> eugeneyan.com. 
https://eugeneyan.com/writing/llm-patterns/.\n", + "\n", + "or\n", + "\n", + " \n", + " \n", + "    @article{yan2023llm-patterns,\n", + "      title   = {Patterns for Building LLM-based Systems & Products},\n", + "      author  = {Yan, Ziyou},\n", + "      journal = {eugeneyan.com},\n", + "      year    = {2023},\n", + "      month   = {Jul},\n", + "      url     = {https://eugeneyan.com/writing/llm-patterns/}\n", + "    }\n", + "\n", + "Eugene Yan designs, builds, and operates machine learning systems that serve\n", + "customers at scale. He's currently a Senior Applied Scientist at Amazon.\n", + "Previously, he led machine learning at Lazada (acquired by Alibaba) and a\n", + "Healthtech Series A. 
He [writes](/writing/) & [speaks](/speaking/) about\n", + "machine learning, recommenders, LLMs, and engineering at\n", + "[eugeneyan.com](https://eugeneyan.com/) and\n", + "[ApplyingML.com](https://applyingml.com/).\n", + "\n", + "© Eugene Yan 2015 - 2024 • [Feedback](/site-feedback/) • [RSS](/rss/)\n", + "\n", + "\n" + ] + } + ], + "source": [ + "print(docs[0].get_content())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "orig_nodes = node_parser.get_nodes_from_documents(docs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "because evals were often conducted with untested, incorrect\n", + "ROUGE implementations.\n", + "\n", + "![Dimensions of model evaluations with ROUGE](/assets/rogue-scores.jpg)\n", + "\n", + "Dimensions of model evaluations with ROUGE\n", + "([source](https://aclanthology.org/2023.acl-long.107/))\n", + "\n", + "And even with recent benchmarks such as MMLU, **the same model can get\n", + "significantly different scores based on the eval implementation**.\n", + "[Huggingface compared the original MMLU\n", + "implementation](https://huggingface.co/blog/evaluating-mmlu-leaderboard) with\n", + "the HELM and EleutherAI implementations and found that the same example could\n", + "have different prompts across various providers.\n", + "\n", + "![Different prompts for the same question across MMLU\n", + "implementations](/assets/mmlu-prompt.jpg)\n", + "\n", + "Different prompts for the same question across MMLU implementations\n", + "([source](https://huggingface.co/blog/evaluating-mmlu-leaderboard))\n", + "\n", + "Furthermore, the evaluation approach differed across all three benchmarks:\n", + "\n", + " * Original MMLU: Compares predicted probabilities on the answers only (A, B, C, D)\n", + " * HELM: Uses the next token probabilities from the model and picks the token with the\n" + ] + } + ], + "source": [ + "print(orig_nodes[20:28][3].get_content(metadata_mode=\"all\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Question Extractor on Nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 8/8 [00:03<00:00, 2.04it/s]\n" + ] + } + ], + "source": [ + "nodes_1 = node_parser.get_nodes_from_documents(docs)[20:28]\n", + "nodes_1 = question_extractor(nodes_1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Excerpt from document]\n", + "questions_this_excerpt_can_answer: 1. How do different implementations of the MMLU benchmark affect the scores of the same model?\n", + "2. What are the differences in evaluation approaches between the original MMLU benchmark, HELM, and EleutherAI implementations?\n", + "3. 
How do various providers differ in the prompts they use for evaluating models in the MMLU benchmark?\n", + "Excerpt:\n", + "-----\n", + "because evals were often conducted with untested, incorrect\n", + "ROUGE implementations.\n", + "\n", + "![Dimensions of model evaluations with ROUGE](/assets/rogue-scores.jpg)\n", + "\n", + "Dimensions of model evaluations with ROUGE\n", + "([source](https://aclanthology.org/2023.acl-long.107/))\n", + "\n", + "And even with recent benchmarks such as MMLU, **the same model can get\n", + "significantly different scores based on the eval implementation**.\n", + "[Huggingface compared the original MMLU\n", + "implementation](https://huggingface.co/blog/evaluating-mmlu-leaderboard) with\n", + "the HELM and EleutherAI implementations and found that the same example could\n", + "have different prompts across various providers.\n", + "\n", + "![Different prompts for the same question across MMLU\n", + "implementations](/assets/mmlu-prompt.jpg)\n", + "\n", + "Different prompts for the same question across MMLU implementations\n", + "([source](https://huggingface.co/blog/evaluating-mmlu-leaderboard))\n", + "\n", + "Furthermore, the evaluation approach differed across all three benchmarks:\n", + "\n", + " * Original MMLU: Compares predicted probabilities on the answers only (A, B, C, D)\n", + " * HELM: Uses the next token probabilities from the model and picks the token with the\n", + "-----\n" + ] + } + ], + "source": [ + "print(nodes_1[3].get_content(metadata_mode=\"all\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Build Indices" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex\n", + "from llama_index.core.response.notebook_utils import (\n", + " display_source_node,\n", + " display_response,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "index0 = VectorStoreIndex(orig_nodes)\n", + "index1 = VectorStoreIndex(orig_nodes[:20] + nodes_1 + orig_nodes[28:])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Query Engines" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_engine0 = index0.as_query_engine(similarity_top_k=1)\n", + "query_engine1 = index1.as_query_engine(similarity_top_k=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_str = (\n", + " \"Can you describe metrics for evaluating text generation quality, compare\"\n", + " \" them, and tell me about their downsides\"\n", + ")\n", + "\n", + "response0 = query_engine0.query(query_str)\n", + "response1 = query_engine1.query(query_str)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "**`Final Response:`** Metrics for evaluating text generation quality can be categorized as context-dependent or context-free. Context-dependent metrics consider the context of the task and may need adjustments for different tasks. 
On the other hand, context-free metrics do not consider task-specific context and are easier to apply across various tasks.\n", + "\n", + "Some commonly used metrics for evaluating text generation quality include BLEU, ROUGE, BERTScore, and MoverScore. \n", + "\n", + "- **BLEU (Bilingual Evaluation Understudy)** is a precision-based metric that compares n-grams in the generated output with those in the reference. \n", + "- **ROUGE (Recall-Oriented Understudy for Gisting Evaluation)** evaluates the overlap between the generated output and reference summaries.\n", + "- **BERTScore** leverages contextual embeddings to measure the similarity between the generated output and reference.\n", + "- **MoverScore** considers the semantic similarity between the generated output and reference using Earth Mover's Distance.\n", + "\n", + "Each of these metrics has its own strengths and weaknesses. For example, BLEU may not capture the overall fluency and coherence of the generated text, while ROUGE may not consider the semantic meaning adequately. BERTScore and MoverScore, on the other hand, may require pre-trained models and can be computationally expensive. It's important to consider the specific requirements of the task when selecting an appropriate evaluation metric." + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "---" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**`Source Node 1/1`**" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**Node ID:** 4edc4466-e9ae-47ae-b0ee-8a8ac27a0378
**Similarity:** 0.8381672789063448
**Text:** GPT-4) prefers the output of one model over a reference model. Metrics include win rate, bias, latency, price, variance, etc. Validated to have high agreement with 20k human annotations.\n", + "\n", + "We can group metrics into two categories: context-dependent or context-free.\n", + "\n", + " * **Context-dependent** : These take context into account. They’re often proposed for a specific task; repurposing them for other tasks will require some adjustment.\n", + " * **Context-free** : These aren’t tied to the context when evaluating generated output; they only compare the output with the provided gold references. As they’re task agnostic, they’re easier to apply to a wide variety of tasks.\n", + "\n", + "To get a better sense of these metrics (and their potential shortfalls), we’ll\n", + "explore a few of the commonly used metrics such as BLEU, ROUGE, BERTScore, and\n", + "MoverScore.\n", + "\n", + "**[BLEU](https://dl.acm.org/doi/10.3115/1073083.1073135) (Bilingual Evaluation\n", + "Understudy)** is a precision-based metric: It counts the number of n-grams in\n", + "th...
**Metadata:** {}
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display_response(\n", + " response0, source_length=1000, show_source=True, show_source_metadata=True\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "**`Final Response:`** Metrics for evaluating text generation quality include BLEU and ROUGE. These metrics are commonly used but have limitations. BLEU and ROUGE have shown poor correlation with human judgments in terms of fluency and adequacy. They also exhibit low correlation with tasks that require creativity and diversity in text generation. Additionally, exact match metrics like BLEU and ROUGE are not suitable for tasks such as abstractive summarization or dialogue in text generation due to their reliance on n-gram overlap, which may not capture the nuances of these tasks effectively." + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "---" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**`Source Node 1/1`**" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**Node ID:** 52856a1d-be29-494a-84be-e8db8a736675
**Similarity:** 0.8459422950143721
**Text:** finds the minimum effort to transform\n", + "one text into another. The idea is to measure the distance that words would\n", + "have to move to convert one sequence to another.\n", + "\n", + "However, there are several pitfalls to using these conventional benchmarks and\n", + "metrics.\n", + "\n", + "First, there’s **poor correlation between these metrics and human judgments.**\n", + "BLEU, ROUGE, and others have had [negative correlation with how humans\n", + "evaluate fluency](https://arxiv.org/abs/2008.12009). They also showed moderate\n", + "to less correlation with human adequacy scores. In particular, BLEU and ROUGE\n", + "have [low correlation with tasks that require creativity and\n", + "diversity](https://arxiv.org/abs/2303.16634).\n", + "\n", + "Second, these metrics often have **poor adaptability to a wider variety of\n", + "tasks**. Adopting a metric proposed for one task to another is not always\n", + "prudent. For example, exact match metrics such as BLEU and ROUGE are a poor\n", + "fit for tasks like abstractive summarization or dialogue. Since they’re based\n", + "on n-gram overlap between ...
**Metadata:** {'questions_this_excerpt_can_answer': '1. How do conventional benchmarks and metrics for measuring text transformation performance compare to human judgments in terms of fluency and adequacy?\\n2. What is the correlation between metrics like BLEU and ROUGE and tasks that require creativity and diversity in text generation?\\n3. Why are exact match metrics like BLEU and ROUGE not suitable for tasks like abstractive summarization or dialogue in text generation?'}
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display_response(\n", + " response1, source_length=1000, show_source=True, show_source_metadata=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Extract Metadata Using PydanticProgramExtractor\n", + "\n", + "PydanticProgramExtractor enables extracting an entire Pydantic object using an LLM.\n", + "\n", + "This approach allows for extracting multiple entities in a single LLM call, offering an advantage over using a single metadata extractor." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pydantic import BaseModel, Field\n", + "from typing import List" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup the Pydantic Model¶\n", + "\n", + "Here we define a basic structured schema that we want to extract. It contains:\n", + "\n", + "Entities: unique entities in a text chunk\n", + "Summary: a concise summary of the text chunk" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "class NodeMetadata(BaseModel):\n", + " \"\"\"Node metadata.\"\"\"\n", + "\n", + " entities: List[str] = Field(\n", + " ..., description=\"Unique entities in this text chunk.\"\n", + " )\n", + " summary: str = Field(\n", + " ..., description=\"A concise summary of this text chunk.\"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup the Extractor¶\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.program.openai import OpenAIPydanticProgram\n", + "from llama_index.core.extractors import PydanticProgramExtractor\n", + "\n", + "EXTRACT_TEMPLATE_STR = \"\"\"\\\n", + "Here is the content of the section:\n", + "----------------\n", + "{context_str}\n", + "----------------\n", + "Given the contextual information, extract out a {class_name} object.\\\n", + "\"\"\"\n", + "\n", + "openai_program = OpenAIPydanticProgram.from_defaults(\n", + " output_cls=NodeMetadata,\n", + " prompt_template_str=\"{input}\",\n", + " extract_template_str=EXTRACT_TEMPLATE_STR,\n", + ")\n", + "\n", + "metadata_extractor = PydanticProgramExtractor(\n", + " program=openai_program, input_key=\"input\", show_progress=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Extract metadata from the node" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 1/1 [00:01<00:00, 1.51s/it]\n" + ] + } + ], + "source": [ + "extract_metadata = metadata_extractor.extract(orig_nodes[0:1])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'entities': ['eugeneyan', 'llm', 'engineering', 'production'],\n", + " 'summary': 'Patterns for Building LLM-based Systems & Products - Discussions on HackerNews, Twitter, and LinkedIn. There is a large class of problems that are easy to imagine and build demos for, but extremely hard to make products out of. 
For example, self-driving: It’s easy to demo a'}]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "extract_metadata" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 1/1 [00:01<00:00, 1.03s/it]\n" + ] + } + ], + "source": [ + "metadata_nodes = metadata_extractor.process_nodes(orig_nodes[0:1])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[TextNode(id_='2b6a40a8-dd6a-44a8-a005-da32ad98a05c', embedding=None, metadata={'entities': ['eugeneyan', 'llm', 'engineering', 'production'], 'summary': 'Patterns for Building LLM-based Systems & Products - Discussions on HackerNews, Twitter, and LinkedIn. Content includes discussions on self-driving technology and challenges in turning demos into products.'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={<NodeRelationship.SOURCE: '1'>: RelatedNodeInfo(node_id='https://eugeneyan.com/writing/llm-patterns/', node_type=<ObjectType.DOCUMENT: '4'>, metadata={}, hash='9da2827b0860b2f81e51cb3efd93a13227f0e4312355a495e5668669f257cb14'), <NodeRelationship.NEXT: '3'>: RelatedNodeInfo(node_id='d3a86dba-7579-4196-80d7-30affa7052a7', node_type=<ObjectType.TEXT: '1'>, metadata={}, hash='993e43bb060cf2f183f894f8dec6708eadcac2b7d2760a94916dc82c24255acc')}, text='# [eugeneyan](/)\\n\\n * [Start Here](/start-here/ \"Start Here\")\\n * [Writing](/writing/ \"Writing\")\\n * [Speaking](/speaking/ \"Speaking\")\\n * [Prototyping](/prototyping/ \"Prototyping\")\\n * [About](/about/ \"About\")\\n\\n# Patterns for Building LLM-based Systems & Products\\n\\n[ [llm](/tag/llm/) [engineering](/tag/engineering/)\\n[production](/tag/production/) [🔥](/tag/🔥/) ] · 66 min read\\n\\n> Discussions on [HackerNews](https://news.ycombinator.com/item?id=36965993),\\n> [Twitter](https://twitter.com/eugeneyan/status/1686531758701899776), and\\n> [LinkedIn](https://www.linkedin.com/posts/eugeneyan_patterns-for-building-\\n> llm-based-systems-activity-7092300473981927424-_wVo)\\n\\n“There is a large class of problems that are easy to imagine and build demos\\nfor, but extremely hard to make products out of. For example, self-driving:\\nIt’s easy to demo a', start_char_idx=0, end_char_idx=838, text_template='[Excerpt from document]\\n{metadata_str}\\nExcerpt:\\n-----\\n{content}\\n-----\\n', metadata_template='{key}: {value}', metadata_seperator='\\n')]" + ], + "text/plain": [ + "" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "metadata_nodes" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-5/Observability.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-5/Observability.ipynb new file mode 100644 index 0000000000000..1f08ea9a78768 --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-5/Observability.ipynb @@ -0,0 +1,483 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
# Observability with Arize Phoenix - Tracing and Evaluating a LlamaIndex Application
\n", + "\n", + "LlamaIndex provides high-level APIs that enable users to build powerful applications in a few lines of code. However, it can be challenging to understand what is going on under the hood and to pinpoint the cause of issues. Phoenix makes your LLM applications *observable* by visualizing the underlying structure of each call to your query engine and surfacing problematic `spans`` of execution based on latency, token count, or other evaluation metrics.\n", + "\n", + "In this tutorial, you will:\n", + "- Build a simple query engine using LlamaIndex that uses retrieval-augmented generation to answer questions over the Paul Graham Essay,\n", + "- Record trace data in [OpenInference tracing](https://github.com/Arize-ai/openinference) format using the global `arize_phoenix` handler\n", + "- Inspect the traces and spans of your application to identify sources of latency and cost,\n", + "- Export your trace data as a pandas dataframe and run an [LLM Evals](https://docs.arize.com/phoenix/concepts/llm-evals).\n", + "\n", + "ℹ️ This notebook requires an OpenAI API key.\n", + "\n", + "[Observability Documentation](https://docs.llamaindex.ai/en/stable/module_guides/observability/)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Install Dependencies and Import Libraries\n", + "\n", + "Install Phoenix, LlamaIndex, and OpenAI." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index\n", + "!pip install llama-index-callbacks-arize-phoenix\n", + "!pip install arize-phoenix[evals]\n", + "!pip install \"openinference-instrumentation-llama-index>=1.0.0\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "import os\n", + "from getpass import getpass\n", + "from urllib.request import urlopen\n", + "\n", + "import nest_asyncio\n", + "import openai\n", + "import pandas as pd\n", + "import phoenix as px\n", + "from llama_index.core import (\n", + " Settings,\n", + " set_global_handler,\n", + ")\n", + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.llms.openai import OpenAI\n", + "from phoenix.evals import (\n", + " HallucinationEvaluator,\n", + " OpenAIModel,\n", + " QAEvaluator,\n", + " RelevanceEvaluator,\n", + " run_evals,\n", + ")\n", + "from phoenix.session.evaluation import (\n", + " get_qa_with_reference,\n", + " get_retrieved_documents,\n", + ")\n", + "from phoenix.trace import DocumentEvaluations, SpanEvaluations\n", + "from tqdm import tqdm\n", + "\n", + "nest_asyncio.apply()\n", + "pd.set_option(\"display.max_colwidth\", 1000)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Launch Phoenix\n", + "\n", + "You can run Phoenix in the background to collect trace data emitted by any LlamaIndex application that has been instrumented with the `OpenInferenceTraceCallbackHandler`. Phoenix supports LlamaIndex's [one-click observability](https://gpt-index.readthedocs.io/en/latest/end_to_end_tutorials/one_click_observability.html) which will automatically instrument your LlamaIndex application! 
You can consult our [integration guide](https://docs.arize.com/phoenix/integrations/llamaindex) for a more detailed explanation of how to instrument your LlamaIndex application.\n", + "\n", + "Launch Phoenix and follow the instructions in the cell output to open the Phoenix UI (the UI should be empty because we have yet to run the LlamaIndex application)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "🌍 To view the Phoenix app in your browser, visit https://jfgzmj4xrg3-496ff2e9c6d22116-6006-colab.googleusercontent.com/\n", + "📺 To view the Phoenix app in a notebook, run `px.active_session().view()`\n", + "📖 For more information on how to use Phoenix, check out https://docs.arize.com/phoenix\n" + ] + } + ], + "source": [ + "session = px.launch_app()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Configure Your OpenAI API Key\n", + "\n", + "Set your OpenAI API key if it is not already set as an environment variable." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. Build Index and Create QueryEngine\n", + "\n", + "a. Download Data\n", + "\n", + "b. Load Data\n", + "\n", + "c. Setup Phoenix Tracing\n", + "\n", + "d. Setup LLM and Embedding Model\n", + "\n", + "e. Create Index\n", + "\n", + "f. Create Query Engine\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-04-26 03:09:56-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 75042 (73K) [text/plain]\n", + "Saving to: ‘paul_graham_essay.txt’\n", + "\n", + "paul_graham_essay.t 100%[===================>]  73.28K  --.-KB/s    in 0.01s   \n", + "\n", + "2024-04-26 03:09:56 (5.58 MB/s) - ‘paul_graham_essay.txt’ saved [75042/75042]\n", + "\n" + ] + } + ], + "source": [ + "!wget \"https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\" -O \"paul_graham_essay.txt\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex, SimpleDirectoryReader\n", + "\n", + "documents = SimpleDirectoryReader(\n", + "    input_files=[\"paul_graham_essay.txt\"]\n", + ").load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup Phoenix Tracing\n", + "\n", + "Enable Phoenix tracing within LlamaIndex by setting `arize_phoenix` as the global handler. This mounts Phoenix's [OpenInferenceTraceCallback](https://docs.arize.com/phoenix/integrations/llamaindex) as the callback handler. Phoenix uses OpenInference traces - an open-source standard for capturing and storing LLM application traces that enables LLM applications to seamlessly integrate with LLM observability solutions such as Phoenix." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "set_global_handler(\"arize_phoenix\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup LLM and Embedding Model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core import Settings\n", + "\n", + "llm = OpenAI(model=\"gpt-3.5-turbo\", temperature=0.2)\n", + "embed_model = OpenAIEmbedding()\n", + "\n", + "Settings.llm = llm\n", + "Settings.embed_model = embed_model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex\n", + "\n", + "index = VectorStoreIndex.from_documents(documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Query Engine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_engine = index.as_query_engine(similarity_top_k=5)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 5. 
Run Your Query Engine and View Your Traces in Phoenix\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "queries = [\n", + "    \"what did paul graham do growing up?\",\n", + "    \"why did paul graham start YC?\",\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 2/2 [00:07<00:00,  3.81s/it]\n" + ] + } + ], + "source": [ + "for query in tqdm(queries):\n", + "    query_engine.query(query)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham is a writer, entrepreneur, and investor known for his involvement in various projects and ventures. He has written essays on diverse topics, founded companies like Viaweb and Y Combinator, and has a strong presence in the startup and technology industry.\n" + ] + } + ], + "source": [ + "print(query_engine.query(\"Who is Paul Graham?\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "🚀 Open the Phoenix UI if you haven't already: https://jfgzmj4xrg4-496ff2e9c6d22116-6006-colab.googleusercontent.com/\n" + ] + } + ], + "source": [ + "print(f\"🚀 Open the Phoenix UI if you haven't already: {session.url}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 6. Export and Evaluate Your Trace Data\n", + "\n", + "You can export your trace data as a pandas dataframe for further analysis and evaluation.\n", + "\n", + "In this case, we will export our `retriever` spans into two separate dataframes:\n", + "- `queries_df`, in which the retrieved documents for each query are concatenated into a single column,\n", + "- `retrieved_documents_df`, in which each retrieved document is \"exploded\" into its own row to enable the evaluation of each query-document pair in isolation.\n", + "\n", + "This will enable us to compute multiple kinds of evaluations, including:\n", + "- relevance: Are the retrieved documents relevant to the query?\n", + "- Q&A correctness: Are your application's responses grounded in the retrieved context?\n", + "- hallucinations: Is your application making up false information?" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "queries_df = get_qa_with_reference(px.Client())\n", + "retrieved_documents_df = get_retrieved_documents(px.Client())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, define your evaluation model and your evaluators.\n", + "\n", + "Evaluators are built on top of language models and prompt the LLM to assess the quality of responses, the relevance of retrieved documents, etc., and provide a quality signal even in the absence of human-labeled data. Pick an evaluator type and instantiate it with the language model you want to use to perform evaluations using our battle-tested evaluation templates.\n",
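+ "\n", + "A minimal sketch of that flow is below (hedged: the model name is illustrative, and the evaluator classes, `run_evals`, `SpanEvaluations`, and `DocumentEvaluations` all come from the `phoenix.evals` / `phoenix.trace` imports at the top of this notebook):\n", + "\n", + "```python\n", + "eval_model = OpenAIModel(model=\"gpt-4\")  # assumption: any sufficiently capable OpenAI model\n", + "\n", + "hallucination_evaluator = HallucinationEvaluator(eval_model)\n", + "qa_correctness_evaluator = QAEvaluator(eval_model)\n", + "relevance_evaluator = RelevanceEvaluator(eval_model)\n", + "\n", + "# Span-level evals run over the query/response dataframe; document-level\n", + "# evals run over the exploded query-document pairs.\n", + "hallucination_df, qa_df = run_evals(\n", + "    dataframe=queries_df,\n", + "    evaluators=[hallucination_evaluator, qa_correctness_evaluator],\n", + "    provide_explanation=True,\n", + ")\n", + "relevance_df = run_evals(\n", + "    dataframe=retrieved_documents_df,\n", + "    evaluators=[relevance_evaluator],\n", + "    provide_explanation=True,\n", + ")[0]\n", + "\n", + "# Log the evals back to Phoenix so they appear alongside the traces.\n", + "px.Client().log_evaluations(\n", + "    SpanEvaluations(eval_name=\"Hallucination\", dataframe=hallucination_df),\n", + "    SpanEvaluations(eval_name=\"QA Correctness\", dataframe=qa_df),\n", + "    DocumentEvaluations(eval_name=\"Relevance\", dataframe=relevance_df),\n", + ")\n", + "```"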
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "00fa4235432f442f8b921841d93e26a3", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "run_evals |          | 0/6 (0.0%) | ⏳ 00:00" … + "def multiply(a: int, b: int) -> int:\n", + "    \"\"\"Multiply two integers and returns the result integer\"\"\"\n", + "    return a * b\n", + "\n", + "\n", + "def add(a: int, b: int) -> int:\n", + "    \"\"\"Add two integers and returns the result integer\"\"\"\n", + "    return a + b\n", + "\n", + "\n", + "def subtract(a: int, b: int) -> int:\n", + "    \"\"\"Subtract two integers and returns the result integer\"\"\"\n", + "    return a - b\n", + "\n", + "\n", + "multiply_tool = FunctionTool.from_defaults(fn=multiply)\n", + "add_tool = FunctionTool.from_defaults(fn=add)\n", + "subtract_tool = FunctionTool.from_defaults(fn=subtract)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### With ReAct Agent" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "agent = ReActAgent.from_tools(\n", + "    [multiply_tool, add_tool, subtract_tool], llm=llm, verbose=True\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[1;3;38;5;200mThought: The user wants to perform a mathematical operation. I need to first multiply 26 by 2 and then add the result to 2024. I'll use the 'multiply' tool first.\n", + "Action: multiply\n", + "Action Input: {'a': 26, 'b': 2}\n", + "\u001b[0m\u001b[1;3;34mObservation: 52\n", + "\u001b[0m\u001b[1;3;38;5;200mThought: The multiplication result is 52. Now, I need to add this result to 2024. I'll use the 'add' tool for this.\n", + "Action: add\n", + "Action Input: {'a': 52, 'b': 2024}\n", + "\u001b[0m\u001b[1;3;34mObservation: 2076\n", + "\u001b[0m\u001b[1;3;38;5;200mThought: I can answer without using any more tools. I'll use the user's language to answer.\n", + "Answer: 2076\n", + "\u001b[0m" + ] + } + ], + "source": [ + "response = agent.chat(\"What is (26 * 2) + 2024?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
2076
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### With Function Calling." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + " [multiply_tool, add_tool, subtract_tool],\n", + " llm=llm,\n", + " verbose=True,\n", + " allow_parallel_tool_calls=False,\n", + ")\n", + "agent = agent_worker.as_agent()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: What is (26 * 2) + 2024?\n", + "=== Calling Function ===\n", + "Calling function: multiply with args: {\"a\": 26, \"b\": 2}\n", + "=== Function Output ===\n", + "52\n", + "=== Calling Function ===\n", + "Calling function: add with args: {\"a\": 52, \"b\": 2024}\n", + "=== Function Output ===\n", + "2076\n", + "=== LLM Response ===\n", + "The result of (26 * 2) + 2024 is 2076.\n" + ] + } + ], + "source": [ + "response = agent.chat(\"What is (26 * 2) + 2024?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
assistant: The result of (26 * 2) + 2024 is 2076.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Agent with RAG Query Engine Tools" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Download Data\n", + "\n", + "We will use `Uber-2021` and `Lyft-2021` 10K SEC filings." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-05-16 14:00:56-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/uber_2021.pdf\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1880483 (1.8M) [application/octet-stream]\n", + "Saving to: ‘./uber_2021.pdf’\n", + "\n", + "./uber_2021.pdf 100%[===================>] 1.79M 8.54MB/s in 0.2s \n", + "\n", + "2024-05-16 14:00:57 (8.54 MB/s) - ‘./uber_2021.pdf’ saved [1880483/1880483]\n", + "\n", + "--2024-05-16 14:00:57-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/lyft_2021.pdf\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1440303 (1.4M) [application/octet-stream]\n", + "Saving to: ‘./lyft_2021.pdf’\n", + "\n", + "./lyft_2021.pdf 100%[===================>] 1.37M 6.90MB/s in 0.2s \n", + "\n", + "2024-05-16 14:00:58 (6.90 MB/s) - ‘./lyft_2021.pdf’ saved [1440303/1440303]\n", + "\n" + ] + } + ], + "source": [ + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/uber_2021.pdf' -O './uber_2021.pdf'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/lyft_2021.pdf' -O './lyft_2021.pdf'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SimpleDirectoryReader\n", + "\n", + "uber_docs = SimpleDirectoryReader(input_files=[\"./uber_2021.pdf\"]).load_data()\n", + "lyft_docs = SimpleDirectoryReader(input_files=[\"./lyft_2021.pdf\"]).load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Build RAG on uber and lyft docs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex\n", + "\n", + "uber_index = VectorStoreIndex.from_documents(uber_docs)\n", + "uber_query_engine = uber_index.as_query_engine(similarity_top_k=3)\n", + "\n", + "lyft_index = VectorStoreIndex.from_documents(lyft_docs)\n", + "lyft_query_engine = lyft_index.as_query_engine(similarity_top_k=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = uber_query_engine.query(\"What are the investments of Uber in 2021?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
In 2021, Uber made significant investments to expand its international operations and compete with local and other global competitors. This included the acquisitions of Careem and Cornershop. Additionally, Uber continued to develop new technologies to enhance existing offerings and services, and to expand the range of its offerings through research and development. They also launched Uber One in the United States, a cross-platform membership program that brings together the best of Uber.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = lyft_query_engine.query(\"What are lyft investments in 2021?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
In 2021, Lyft continued to invest in expanding its network of Light Vehicles and Lyft Autonomous, focusing on the deployment and scaling of third-party self-driving technology on the Lyft network. The company also made a commitment to reach 100% electric vehicles on the Lyft network by the end of 2030. Additionally, Lyft completed a transaction with Woven Planet, a subsidiary of Toyota Motor Corporation, for the divestiture of certain assets related to its self-driving vehicle division, Level 5.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### `FunctionCallingAgent` with RAG QueryEngineTools.\n", + "\n", + "Here we use `Fuction Calling` capabilities of the model." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", + "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "\n", + "query_engine_tools = [\n", + " QueryEngineTool(\n", + " query_engine=lyft_query_engine,\n", + " metadata=ToolMetadata(\n", + " name=\"lyft_10k\",\n", + " description=\"Provides information about Lyft financials for year 2021\",\n", + " ),\n", + " ),\n", + " QueryEngineTool(\n", + " query_engine=uber_query_engine,\n", + " metadata=ToolMetadata(\n", + " name=\"uber_10k\",\n", + " description=\"Provides information about Uber financials for year 2021\",\n", + " ),\n", + " ),\n", + "]\n", + "\n", + "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + " query_engine_tools,\n", + " llm=llm,\n", + " verbose=True,\n", + " allow_parallel_tool_calls=False,\n", + ")\n", + "agent = agent_worker.as_agent()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: What are the investments of Uber in 2021?\n", + "=== Calling Function ===\n", + "Calling function: uber_10k with args: {\"input\": \"investments\"}\n", + "=== Function Output ===\n", + "Uber's investments primarily consist of money market funds, cash deposits, U.S. government and agency securities, and investment-grade corporate debt securities. The company's investment policy aims to preserve capital and meet liquidity requirements without significantly increasing risk. As of December 31, 2021, Uber had cash and cash equivalents including restricted cash and cash equivalents totaling $7.8 billion. They also hold investments in other companies, including minority-owned, privately-held affiliates and recently public companies. The carrying value of these investments was $12.6 billion as of December 31, 2021.\n", + "=== LLM Response ===\n", + "In 2021, Uber's investments primarily consisted of money market funds, cash deposits, U.S. government and agency securities, and investment-grade corporate debt securities. Their investment policy aims to preserve capital and meet liquidity requirements without significantly increasing risk. As of December 31, 2021, Uber had cash and cash equivalents including restricted cash and cash equivalents totaling $7.8 billion. They also hold investments in other companies, including minority-owned, privately-held affiliates and recently public companies. The carrying value of these investments was $12.6 billion as of December 31, 2021.\n" + ] + } + ], + "source": [ + "response = agent.chat(\"What are the investments of Uber in 2021?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
assistant: In 2021, Uber's investments primarily consisted of money market funds, cash deposits, U.S. government and agency securities, and investment-grade corporate debt securities. Their investment policy aims to preserve capital and meet liquidity requirements without significantly increasing risk. As of December 31, 2021, Uber had cash and cash equivalents including restricted cash and cash equivalents totaling $7.8 billion. They also hold investments in other companies, including minority-owned, privately-held affiliates and recently public companies. The carrying value of these investments was $12.6 billion as of December 31, 2021.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: What are lyft investments in 2021?\n", + "=== Calling Function ===\n", + "Calling function: lyft_10k with args: {\"input\": \"investments\"}\n", + "=== Function Output ===\n", + "The company's investments include cash and cash equivalents, short-term investments, and restricted investments. Cash and cash equivalents include certificates of deposits, commercial paper, and corporate bonds that have an original maturity of 90 days or less and are readily convertible to known amounts of cash. Short-term investments are comprised of commercial paper, certificates of deposit, and corporate bonds, which mature in twelve months or less. Restricted investments are comprised of debt security investments in commercial paper, certificates of deposit, corporate bonds, and U.S. government securities which are held in trust accounts at third-party financial institutions pursuant to certain contracts with insurance providers. The company also has investments in non-marketable equity securities, which are measured at cost, with remeasurements to fair value only upon the occurrence of observable transactions for identical or similar investments of the same issuer or impairment.\n", + "=== LLM Response ===\n", + "In 2021, Lyft's investments included cash and cash equivalents, short-term investments, and restricted investments. Cash and cash equivalents included certificates of deposits, commercial paper, and corporate bonds that have an original maturity of 90 days or less and are readily convertible to known amounts of cash. Short-term investments were comprised of commercial paper, certificates of deposit, and corporate bonds, which mature in twelve months or less. \n", + "\n", + "Restricted investments were comprised of debt security investments in commercial paper, certificates of deposit, corporate bonds, and U.S. government securities which are held in trust accounts at third-party financial institutions pursuant to certain contracts with insurance providers. \n", + "\n", + "The company also had investments in non-marketable equity securities, which are measured at cost, with remeasurements to fair value only upon the occurrence of observable transactions for identical or similar investments of the same issuer or impairment.\n" + ] + } + ], + "source": [ + "response = agent.chat(\"What are lyft investments in 2021?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
assistant: In 2021, Lyft's investments included cash and cash equivalents, short-term investments, and restricted investments. Cash and cash equivalents included certificates of deposits, commercial paper, and corporate bonds that have an original maturity of 90 days or less and are readily convertible to known amounts of cash. Short-term investments were comprised of commercial paper, certificates of deposit, and corporate bonds, which mature in twelve months or less. \n", + "\n", + "Restricted investments were comprised of debt security investments in commercial paper, certificates of deposit, corporate bonds, and U.S. government securities which are held in trust accounts at third-party financial institutions pursuant to certain contracts with insurance providers. \n", + "\n", + "The company also had investments in non-marketable equity securities, which are measured at cost, with remeasurements to fair value only upon the occurrence of observable transactions for identical or similar investments of the same issuer or impairment.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "mistralai22b", + "language": "python", + "name": "mistralai22b" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + }, + "vscode": { + "interpreter": { + "hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e" + } + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-6/Router_And_SubQuestion_QueryEngine.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-6/Router_And_SubQuestion_QueryEngine.ipynb new file mode 100644 index 0000000000000..78ad08cec2097 --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-6/Router_And_SubQuestion_QueryEngine.ipynb @@ -0,0 +1,955 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Router QueryEngine and SubQuestion QueryEngine\n", + "\n", + "In this notebook we will demonstrate:\n", + "\n", + "1. **RouterQueryEngine** - Handle user queries to choose from predefined indices.\n", + "2. **SubQuestionQueryEngine** - breaks down the complex query into sub-questions for each relevant data source, then gathers all the intermediate responses and synthesizes a final response.\n", + "\n", + "[RouterQueryEngine Documentation](https://docs.llamaindex.ai/en/stable/examples/query_engine/RouterQueryEngine/)\n", + "\n", + "[SubQuestionQueryEngine Documentation](https://docs.llamaindex.ai/en/stable/examples/query_engine/sub_question_query_engine/)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Router QueryEngine\n", + "\n", + "Routers act as specialized modules that handle user queries and choose from a set of predefined options, each defined by specific metadata.\n", + "\n", + "There are two main types of core router modules:\n", + "\n", + "1. **LLM Selectors**: These modules present the available options as a text prompt and use the LLM text completion endpoint to make decisions.\n", + "\n", + "2. **Pydantic Selectors**: These modules format the options as Pydantic schemas and pass them to a function-calling endpoint, returning the results as Pydantic objects." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# NOTE: This is ONLY necessary in jupyter notebook.\n", + "# Details: Jupyter runs an event-loop behind the scenes.\n", + "# This results in nested event-loops when we start an event-loop to make async queries.\n", + "# This is normally not allowed; we use nest_asyncio to allow it for convenience.\n", + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NumExpr defaulting to 2 threads.\n" + ] + } + ], + "source": [ + "import logging\n", + "import sys\n", + "\n", + "# Set up the root logger\n", + "logger = logging.getLogger()\n", + "logger.setLevel(logging.INFO)  # Set logger level to INFO\n", + "\n", + "# Clear out any existing handlers\n", + "logger.handlers = []\n", + "\n", + "# Set up the StreamHandler to output to sys.stdout (Colab's output)\n", + "handler = logging.StreamHandler(sys.stdout)\n", + "handler.setLevel(logging.INFO)  # Set handler level to INFO\n", + "\n", + "# Add the handler to the logger\n", + "logger.addHandler(handler)\n", + "\n", + "from llama_index.core import (\n", + "    VectorStoreIndex,\n", + "    SummaryIndex,\n", + "    SimpleDirectoryReader,\n", + "    ServiceContext,\n", + "    StorageContext,\n", + ")\n", + "\n", + "import openai\n", + "import os\n", + "from IPython.display import display, HTML\n", + "\n", + "\n", + "# Setup openai api key\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-05-16 05:27:42-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.110.133, 185.199.111.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 75042 (73K) [text/plain]\n", + "Saving to: ‘data/paul_graham/paul_graham_essay.txt’\n", + "\n", + "\r", + "          data/paul   0%[                    ]       0  --.-KB/s               \r", + "data/paul_graham/pa 100%[===================>]  73.28K  --.-KB/s    in 0.002s  \n", + "\n", + "2024-05-16 05:27:42 (46.3 MB/s) - ‘data/paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir -p 'data/paul_graham/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# load documents\n", + "documents = SimpleDirectoryReader(\"data/paul_graham\").load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.text_splitter import SentenceSplitter\n", + "\n", + "# create parser and parse document into nodes\n", + "parser = SentenceSplitter(chunk_size=1024, chunk_overlap=100)\n", + "nodes = parser(documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create VectorStoreIndex and SummaryIndex" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "# Summary Index for summarization questions\n", + "summary_index = SummaryIndex(nodes)\n", + "\n", + "# Vector Index for answering specific context questions\n", + "vector_index = VectorStoreIndex(nodes)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define Query Engines\n", + "\n", + "1. Summary Index Query Engine.\n", + "2. Vector Index Query Engine."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Summary Index Query Engine\n", + "summary_query_engine = summary_index.as_query_engine(\n", + "    response_mode=\"tree_summarize\",\n", + "    use_async=True,\n", + ")\n", + "\n", + "# Vector Index Query Engine\n", + "vector_query_engine = vector_index.as_query_engine()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Build summary index and vector index tools" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.tools.query_engine import QueryEngineTool, ToolMetadata\n", + "\n", + "# Summary Index tool\n", + "summary_tool = QueryEngineTool.from_defaults(\n", + "    query_engine=summary_query_engine,\n", + "    description=\"Useful for summarization questions related to Paul Graham essay on What I Worked On.\",\n", + ")\n", + "\n", + "# Vector Index tool\n", + "vector_tool = QueryEngineTool.from_defaults(\n", + "    query_engine=vector_query_engine,\n", + "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On.\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define Router Query Engine\n", + "\n", + "Various selectors are at your disposal, each offering unique characteristics.\n", + "\n", + "Pydantic selectors, supported exclusively by gpt-4 and the default gpt-3.5-turbo, utilize the OpenAI Function Call API. Instead of interpreting raw JSON, they yield Pydantic selection objects.\n", + "\n", + "On the other hand, LLM selectors employ the LLM to generate a JSON output, which is then parsed to query the relevant indexes.\n", + "\n", + "For both selector types, you can opt to route to either a single index or multiple indexes." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### PydanticSingleSelector\n", + "\n", + "Use the OpenAI Function API to generate/parse pydantic objects under the hood for the router selector." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.query_engine.router_query_engine import RouterQueryEngine\n", + "from llama_index.core.selectors.llm_selectors import (\n", + " LLMSingleSelector,\n", + " LLMMultiSelector,\n", + ")\n", + "from llama_index.core.selectors.pydantic_selectors import (\n", + " PydanticMultiSelector,\n", + " PydanticSingleSelector,\n", + ")\n", + "\n", + "# Create Router Query Engine\n", + "query_engine = RouterQueryEngine(\n", + " selector=PydanticSingleSelector.from_defaults(),\n", + " query_engine_tools=[\n", + " summary_tool,\n", + " vector_tool,\n", + " ],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Selecting query engine 0: The choice is specifically related to summarization questions about Paul Graham's essay on What I Worked On..\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What is the summary of the document?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
The document chronicles Paul Graham's journey through various endeavors, including his experiences with writing, programming, and founding software companies like Viaweb and Y Combinator. It discusses his exploration of painting, personal challenges such as his mother's illness, and his decision to step back from Y Combinator to focus on painting before returning to Lisp programming with the development of a new dialect called Bel. The narrative also covers Graham's reflections on his work choices, the transition of Y Combinator's leadership to Sam Altman, and his contemplation on future projects and the impact of customs in evolving fields.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### LLMSingleSelector\n", + "\n", + "Utilize OpenAI (or another LLM) to internally interpret the generated JSON and determine a sub-index for routing." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create Router Query Engine\n", + "query_engine = RouterQueryEngine(\n", + " selector=LLMSingleSelector.from_defaults(),\n", + " query_engine_tools=[\n", + " summary_tool,\n", + " vector_tool,\n", + " ],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Selecting query engine 0: This choice indicates that the summary is related to summarization questions specifically about Paul Graham's essay on What I Worked On..\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What is the summary of the document?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
The document chronicles Paul Graham's journey through various projects and endeavors, from his early experiences with writing and programming to his involvement in building software companies like Viaweb and Y Combinator. It details his exploration of different projects, challenges faced, decisions made, and eventual transition to focusing on painting and writing essays. The narrative also discusses his experimentation with the Lisp programming language and the development of a new Lisp dialect called Bel. The document concludes with Graham reflecting on his past projects and contemplating his future endeavors, emphasizing the importance of pursuing projects aligned with personal goals and interests.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Selecting query engine 1: This choice is more relevant as it focuses on retrieving specific context from Paul Graham's essay on What I Worked On, which would likely provide information on what he did after RICS..\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What did Paul Graham do after RICS?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
Paul Graham started painting after RICS.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### PydanticMultiSelector\n", + "\n", + "If you anticipate queries being directed to multiple indexes, it's advisable to use a multi-selector. This selector dispatches the query to various sub-indexes and subsequently aggregates the responses through a summary index to deliver a comprehensive answer." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Let's create a simplekeywordtable index and corresponding tool." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SimpleKeywordTableIndex\n", + "\n", + "keyword_index = SimpleKeywordTableIndex(nodes)\n", + "\n", + "keyword_query_engine = keyword_index.as_query_engine()\n", + "\n", + "keyword_tool = QueryEngineTool.from_defaults(\n", + " query_engine=keyword_query_engine,\n", + " description=\"Useful for retrieving specific context using keywords from Paul Graham essay on What I Worked On.\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Build a router query engine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_engine = RouterQueryEngine(\n", + " selector=PydanticMultiSelector.from_defaults(),\n", + " query_engine_tools=[vector_tool, keyword_tool, summary_tool],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Selecting query engine 0: Retrieving specific context from Paul Graham essay on What I Worked On can provide detailed information about noteable events and people from the author's time at Interleaf and YC..\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Selecting query engine 1: Retrieving specific context using keywords from Paul Graham essay on What I Worked On can help identify key events and people related to the author's time at Interleaf and YC..\n", + "> Starting query: What were noteable events and people from the authors time at Interleaf and YC?\n", + "query keywords: ['noteable', 'authors', 'time', 'interleaf', 'yc', 'events', 'people']\n", + "> Extracted keywords: ['time', 'interleaf', 'yc', 'people']\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Combining responses from multiple query engines.\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "# This query could use either a keyword or vector query engine, so it will combine responses from both\n", + "response = query_engine.query(\n", + " \"What were noteable events and people from the authors time at Interleaf and YC?\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
Notable events from the author's time at Interleaf include working on software development, observing inefficiencies in managing versions and ports, and learning about the dynamics of technology companies. Notable people mentioned from this time include Robert and Trevor, with whom the author worked on developing software components like the editor, shopping cart, and manager. \n", + "\n", + "During the author's time at Y Combinator (YC), notable events include working on various projects such as Hacker News, transitioning leadership to Sam Altman, and shifting focus towards painting after leaving YC. Notable people mentioned from this time include Julian, who provided seed funding for Viaweb, and Robert Morris, who advised the author about not letting YC be the last significant endeavor.
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'
{response.response}
'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## SubQuestion Query Engine\n", + "\n", + "Here, we will demonstrate how to use a sub-question query engine to address the challenge of answering a complex query using multiple data sources.\n", + "\n", + "The SubQuestion Query Engine first breaks down the complex query into sub-questions for each relevant data source, then gathers all the intermediate responses and synthesizes a final response." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-05-16 05:36:06-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/uber_2021.pdf\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1880483 (1.8M) [application/octet-stream]\n", + "Saving to: ‘data/10k/uber_2021.pdf’\n", + "\n", + "\r", + "data/10k/uber_2021. 0%[ ] 0 --.-KB/s \r", + "data/10k/uber_2021. 100%[===================>] 1.79M --.-KB/s in 0.01s \n", + "\n", + "2024-05-16 05:36:06 (184 MB/s) - ‘data/10k/uber_2021.pdf’ saved [1880483/1880483]\n", + "\n", + "--2024-05-16 05:36:06-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/lyft_2021.pdf\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.110.133, 185.199.111.133, 185.199.109.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.110.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1440303 (1.4M) [application/octet-stream]\n", + "Saving to: ‘data/10k/lyft_2021.pdf’\n", + "\n", + "data/10k/lyft_2021. 
100%[===================>] 1.37M --.-KB/s in 0.01s \n", + "\n", + "2024-05-16 05:36:06 (120 MB/s) - ‘data/10k/lyft_2021.pdf’ saved [1440303/1440303]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir -p 'data/10k/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/uber_2021.pdf' -O 'data/10k/uber_2021.pdf'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10k/lyft_2021.pdf' -O 'data/10k/lyft_2021.pdf'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lyft_docs = SimpleDirectoryReader(\n", + " input_files=[\"./data/10k/lyft_2021.pdf\"]\n", + ").load_data()\n", + "uber_docs = SimpleDirectoryReader(\n", + " input_files=[\"./data/10k/uber_2021.pdf\"]\n", + ").load_data()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded lyft 10-K with 238 pages\n", + "Loaded Uber 10-K with 307 pages\n" + ] + } + ], + "source": [ + "print(f\"Loaded lyft 10-K with {len(lyft_docs)} pages\")\n", + "print(f\"Loaded Uber 10-K with {len(uber_docs)} pages\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Indices" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "lyft_index = VectorStoreIndex.from_documents(lyft_docs)\n", + "uber_index = VectorStoreIndex.from_documents(uber_docs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define Query Engines" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lyft_engine = lyft_index.as_query_engine(similarity_top_k=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "uber_engine = uber_index.as_query_engine(similarity_top_k=3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "response = await lyft_engine.aquery(\n", + " \"What is the revenue of Lyft in 2021? Answer in millions with page reference\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
The revenue of Lyft in 2021 was $3.208 billion. (Page reference: 79)
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'

{response.response}

'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "response = await uber_engine.aquery(\n", + " \"What is the revenue of Uber in 2021? Answer in millions, with page reference\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "

The revenue of Uber in 2021 was $17,455 million. (Reference: page 77)

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'

{response.response}

'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define QueryEngine Tools" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_engine_tools = [\n", + " QueryEngineTool(\n", + " query_engine=lyft_engine,\n", + " metadata=ToolMetadata(\n", + " name=\"lyft_10k\",\n", + " description=\"Provides information about Lyft financials for year 2021\",\n", + " ),\n", + " ),\n", + " QueryEngineTool(\n", + " query_engine=uber_engine,\n", + " metadata=ToolMetadata(\n", + " name=\"uber_10k\",\n", + " description=\"Provides information about Uber financials for year 2021\",\n", + " ),\n", + " ),\n", + "]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### SubQuestion QueryEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.query_engine.sub_question_query_engine import (\n", + " SubQuestionQueryEngine,\n", + ")\n", + "\n", + "sub_question_query_engine = SubQuestionQueryEngine.from_defaults(\n", + " query_engine_tools=query_engine_tools\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Querying" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "Generated 4 sub questions.\n", + "\u001b[1;3;38;2;237;90;200m[uber_10k] Q: What was the revenue of Uber in 2020?\n", + "\u001b[0m\u001b[1;3;38;2;90;149;237m[uber_10k] Q: What was the revenue of Uber in 2021?\n", + "\u001b[0m\u001b[1;3;38;2;11;159;203m[lyft_10k] Q: What was the revenue of Lyft in 2020?\n", + "\u001b[0m\u001b[1;3;38;2;155;135;227m[lyft_10k] Q: What was the revenue of Lyft in 2021?\n", + "\u001b[0mHTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "\u001b[1;3;38;2;155;135;227m[lyft_10k] A: $3,208,323\n", + "\u001b[0mHTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "\u001b[1;3;38;2;11;159;203m[lyft_10k] A: $2,364,681\n", + "\u001b[0mHTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "\u001b[1;3;38;2;237;90;200m[uber_10k] A: $11,139 million\n", + "\u001b[0mHTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n", + "\u001b[1;3;38;2;90;149;237m[uber_10k] A: $17,455\n", + "\u001b[0mHTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n" + ] + } + ], + "source": [ + "response = await sub_question_query_engine.aquery(\n", + " \"Compare revenue growth of Uber and Lyft from 2020 to 2021\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "

Lyft's revenue grew by approximately 35.4% from 2020 to 2021, while Uber's revenue increased by around 56.8% during the same period.

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "display(HTML(f'

{response.response}

'))" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-7/Multi_Modal_RAG_System.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-7/Multi_Modal_RAG_System.ipynb new file mode 100644 index 0000000000000..d4950ea498822 --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-7/Multi_Modal_RAG_System.ipynb @@ -0,0 +1,722 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Multi-Modal RAG System\n", + "\n", + "In this notebook we will demonstrate the following:\n", + "\n", + "1. Querying images with a Multi-Modal LLM.\n", + "2. Buidling a Multi-Modal RAG System." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index-multi-modal-llms-openai\n", + "!pip install llama-index-vector-stores-qdrant\n", + "!pip install llama_index ftfy regex tqdm\n", + "!pip install llama-index-embeddings-clip\n", + "!pip install git+https://github.com/openai/CLIP.git\n", + "!pip install matplotlib scikit-image" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set API Key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Images with `urls`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.multi_modal_llms.openai import OpenAIMultiModal\n", + "\n", + "from llama_index.core.multi_modal_llms.generic_utils import load_image_urls\n", + "\n", + "\n", + "image_urls = [\n", + " \"https://res.cloudinary.com/hello-tickets/image/upload/c_limit,f_auto,q_auto,w_1920/v1640835927/o3pfl41q7m5bj8jardk0.jpg\",\n", + "]\n", + "\n", + "image_documents = load_image_urls(image_urls)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying with `GPT-4V` vision API." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "openai_mm_llm = OpenAIMultiModal(\n", + " model=\"gpt-4-vision-preview\", max_new_tokens=300\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The image shows the Colosseum in Rome, Italy, illuminated at night with the colors of the Italian flag: green, white, and red. The ancient amphitheater's iconic arches are vividly lit, and the structure stands out against the dark blue evening sky. 
Some construction or excavation work appears to be taking place in the foreground, and a few people can be seen walking near the site.\n" + ] + } + ], + "source": [ + "response = openai_mm_llm.complete(\n", + " prompt=\"Describe the images as an alternative text\",\n", + " image_documents=image_documents,\n", + ")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Images from directory" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "\n", + "input_image_path = Path(\"input_images\")\n", + "if not input_image_path.exists():\n", + " Path.mkdir(input_image_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-04-27 20:51:52-- https://docs.google.com/uc?export=download&id=1nUhsBRiSWxcVQv8t8Cvvro8HJZ88LCzj\n", + "Resolving docs.google.com (docs.google.com)... 172.253.63.113, 172.253.63.139, 172.253.63.100, ...\n", + "Connecting to docs.google.com (docs.google.com)|172.253.63.113|:443... connected.\n", + "HTTP request sent, awaiting response... 303 See Other\n", + "Location: https://drive.usercontent.google.com/download?id=1nUhsBRiSWxcVQv8t8Cvvro8HJZ88LCzj&export=download [following]\n", + "--2024-04-27 20:51:52-- https://drive.usercontent.google.com/download?id=1nUhsBRiSWxcVQv8t8Cvvro8HJZ88LCzj&export=download\n", + "Resolving drive.usercontent.google.com (drive.usercontent.google.com)... 172.253.122.132, 2607:f8b0:4004:c09::84\n", + "Connecting to drive.usercontent.google.com (drive.usercontent.google.com)|172.253.122.132|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 160684 (157K) [image/png]\n", + "Saving to: ‘./input_images/long_range_spec.png’\n", + "\n", + "./input_images/long 100%[===================>] 156.92K --.-KB/s in 0.01s \n", + "\n", + "2024-04-27 20:51:53 (12.3 MB/s) - ‘./input_images/long_range_spec.png’ saved [160684/160684]\n", + "\n", + "--2024-04-27 20:51:53-- https://docs.google.com/uc?export=download&id=19pLwx0nVqsop7lo0ubUSYTzQfMtKJJtJ\n", + "Resolving docs.google.com (docs.google.com)... 172.253.63.113, 172.253.63.139, 172.253.63.100, ...\n", + "Connecting to docs.google.com (docs.google.com)|172.253.63.113|:443... connected.\n", + "HTTP request sent, awaiting response... 303 See Other\n", + "Location: https://drive.usercontent.google.com/download?id=19pLwx0nVqsop7lo0ubUSYTzQfMtKJJtJ&export=download [following]\n", + "--2024-04-27 20:51:53-- https://drive.usercontent.google.com/download?id=19pLwx0nVqsop7lo0ubUSYTzQfMtKJJtJ&export=download\n", + "Resolving drive.usercontent.google.com (drive.usercontent.google.com)... 172.253.122.132, 2607:f8b0:4004:c09::84\n", + "Connecting to drive.usercontent.google.com (drive.usercontent.google.com)|172.253.122.132|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1308468 (1.2M) [image/png]\n", + "Saving to: ‘./input_images/model_y.png’\n", + "\n", + "./input_images/mode 100%[===================>] 1.25M --.-KB/s in 0.03s \n", + "\n", + "2024-04-27 20:51:55 (38.8 MB/s) - ‘./input_images/model_y.png’ saved [1308468/1308468]\n", + "\n", + "--2024-04-27 20:51:55-- https://docs.google.com/uc?export=download&id=1utu3iD9XEgR5Sb7PrbtMf1qw8T1WdNmF\n", + "Resolving docs.google.com (docs.google.com)... 
172.253.63.113, 172.253.63.139, 172.253.63.100, ...\n", + "Connecting to docs.google.com (docs.google.com)|172.253.63.113|:443... connected.\n", + "HTTP request sent, awaiting response... 303 See Other\n", + "Location: https://drive.usercontent.google.com/download?id=1utu3iD9XEgR5Sb7PrbtMf1qw8T1WdNmF&export=download [following]\n", + "--2024-04-27 20:51:55-- https://drive.usercontent.google.com/download?id=1utu3iD9XEgR5Sb7PrbtMf1qw8T1WdNmF&export=download\n", + "Resolving drive.usercontent.google.com (drive.usercontent.google.com)... 172.253.122.132, 2607:f8b0:4004:c09::84\n", + "Connecting to drive.usercontent.google.com (drive.usercontent.google.com)|172.253.122.132|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 62818 (61K) [image/png]\n", + "Saving to: ‘./input_images/performance_spec.png’\n", + "\n", + "./input_images/perf 100%[===================>] 61.35K --.-KB/s in 0.007s \n", + "\n", + "2024-04-27 20:51:55 (8.12 MB/s) - ‘./input_images/performance_spec.png’ saved [62818/62818]\n", + "\n", + "--2024-04-27 20:51:55-- https://docs.google.com/uc?export=download&id=1dpUakWMqaXR4Jjn1kHuZfB0pAXvjn2-i\n", + "Resolving docs.google.com (docs.google.com)... 172.253.63.113, 172.253.63.139, 172.253.63.100, ...\n", + "Connecting to docs.google.com (docs.google.com)|172.253.63.113|:443... connected.\n", + "HTTP request sent, awaiting response... 303 See Other\n", + "Location: https://drive.usercontent.google.com/download?id=1dpUakWMqaXR4Jjn1kHuZfB0pAXvjn2-i&export=download [following]\n", + "--2024-04-27 20:51:55-- https://drive.usercontent.google.com/download?id=1dpUakWMqaXR4Jjn1kHuZfB0pAXvjn2-i&export=download\n", + "Resolving drive.usercontent.google.com (drive.usercontent.google.com)... 172.253.122.132, 2607:f8b0:4004:c09::84\n", + "Connecting to drive.usercontent.google.com (drive.usercontent.google.com)|172.253.122.132|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 128556 (126K) [image/png]\n", + "Saving to: ‘./input_images/price.png’\n", + "\n", + "./input_images/pric 100%[===================>] 125.54K --.-KB/s in 0.01s \n", + "\n", + "2024-04-27 20:51:56 (10.9 MB/s) - ‘./input_images/price.png’ saved [128556/128556]\n", + "\n", + "--2024-04-27 20:51:56-- https://docs.google.com/uc?export=download&id=1qNeT201QAesnAP5va1ty0Ky5Q_jKkguV\n", + "Resolving docs.google.com (docs.google.com)... 172.253.63.113, 172.253.63.139, 172.253.63.100, ...\n", + "Connecting to docs.google.com (docs.google.com)|172.253.63.113|:443... connected.\n", + "HTTP request sent, awaiting response... 303 See Other\n", + "Location: https://drive.usercontent.google.com/download?id=1qNeT201QAesnAP5va1ty0Ky5Q_jKkguV&export=download [following]\n", + "--2024-04-27 20:51:56-- https://drive.usercontent.google.com/download?id=1qNeT201QAesnAP5va1ty0Ky5Q_jKkguV&export=download\n", + "Resolving drive.usercontent.google.com (drive.usercontent.google.com)... 172.253.122.132, 2607:f8b0:4004:c09::84\n", + "Connecting to drive.usercontent.google.com (drive.usercontent.google.com)|172.253.122.132|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 160998 (157K) [image/png]\n", + "Saving to: ‘./input_images/real_wheel_spec.png’\n", + "\n", + "./input_images/real 100%[===================>] 157.22K --.-KB/s in 0.01s \n", + "\n", + "2024-04-27 20:51:57 (12.5 MB/s) - ‘./input_images/real_wheel_spec.png’ saved [160998/160998]\n", + "\n" + ] + } + ], + "source": [ + "!wget \"https://docs.google.com/uc?export=download&id=1nUhsBRiSWxcVQv8t8Cvvro8HJZ88LCzj\" -O ./input_images/long_range_spec.png\n", + "!wget \"https://docs.google.com/uc?export=download&id=19pLwx0nVqsop7lo0ubUSYTzQfMtKJJtJ\" -O ./input_images/model_y.png\n", + "!wget \"https://docs.google.com/uc?export=download&id=1utu3iD9XEgR5Sb7PrbtMf1qw8T1WdNmF\" -O ./input_images/performance_spec.png\n", + "!wget \"https://docs.google.com/uc?export=download&id=1dpUakWMqaXR4Jjn1kHuZfB0pAXvjn2-i\" -O ./input_images/price.png\n", + "!wget \"https://docs.google.com/uc?export=download&id=1qNeT201QAesnAP5va1ty0Ky5Q_jKkguV\" -O ./input_images/real_wheel_spec.png" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from PIL import Image\n", + "import matplotlib.pyplot as plt\n", + "import os\n", + "\n", + "\n", + "def plot_images(image_paths):\n", + " images_shown = 0\n", + " plt.figure(figsize=(16, 9))\n", + " for img_path in image_paths:\n", + " if os.path.isfile(img_path):\n", + " image = Image.open(img_path)\n", + "\n", + " plt.subplot(2, 3, images_shown + 1)\n", + " plt.imshow(image)\n", + " plt.xticks([])\n", + " plt.yticks([])\n", + "\n", + " images_shown += 1\n", + " if images_shown >= 9:\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABKYAAALJCAYAAACObCtTAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdd2Ac9Zn/8ffM9lVbdVnFTe4FF1wB00wPYHpLCIRwCSEJIeQgQJLf5S4hHEdCEkoqCST03kIvBmOMMdjGttxlW7Z63yptnfn9sbuzs5IMhgCy8fO66Han7OzsrKxFHz3f56vouq4jhBBCCCGEEEIIIcQXTB3uExBCCCGEEEIIIYQQBycJpoQQQgghhBBCCCHEsJBgSgghhBBCCCGEEEIMCwmmhBBCCCGEEEIIIcSwkGBKCCGEEEIIIYQQQgwLCaaEEEIIIYQQQgghxLCQYEoIIYQQQgghhBBCDAsJpoQQQgghhBBCCCHEsLAO9wmI4aNpGi0tLeTl5aEoynCfjviC6bpOIBCgsrISVZWMWogDnfxMF0LIZ7sQGfK5KMTw+iSfSRJMHcRaWlqoqakZ7tMQw6yxsZHq6urhPg0hxL9JfqYLIdLks10I+VwUYn+xL59JEkwdxPLy8oDkN0peXh49vT6279iNpumAkvqfAooCKMZfGhSSy0Pvg7HN+MOEkt6fQcfKup/eNuT9vawbtJyRWfVJ/kKioydvBqxOrU9v0PXM3sYD9NRq3fSYzPa97qvrqVvz8w88hvn59Mx+qeXBz5PcWlxUwLixo7BYBifUfr+fmpoa4/tACHFgk3/LQog0+XkghPw7EGJ/sS//FiWYOoilA6H8/Hzc7hxu+PntvPDKcpJhkQVFTX0pFtTUraKq2etUC4qSWZferiqW7HWDtqmDtynpY5nvp27TX6oK5mVFMS0rQHqdYgRmSipA23tApaf+lw6IdHRTIKTrmnE78H7mK4GumZY18zrTra6ha+bbzDbN2DeRtY+W3sfY37TNvC51XzPtW1SYx4N/u4XJk2o/9vtACHFgk3/LQog0+XkghPw7EGJ/sS//FiWYEgBoukb9jj0kEolU6JSqutF0UHU0XUdFA10xKnY0XUPVU9VQeibUyVTwaKltCqQfq+loavpxGmgKqKZblNRzpu9rRot+HVA0UwGWArqiGMvp+zoKiq4kl1O3yWoiUxVXmp6pPUpWMWWql4wQCi25Ph1YGQGVOYTSM19GQKUPvtXM++gDvrTM9kGPMR8z+Xht4DF1HU3Ts/br6u6ls7uXyZ/rd48QQgghhBBCCPHpSDAlDJmAJjNkTFFMVUQKKOnwSUkNJNMBRUcxrUvumxpaZmwj8zjdFAApetb6rHWkUqjUeiU9BC69D3rysKkQTEFFR0NBQUdFQU/tkRrulzrCEC8889qzKqWS55AJpTKBFOZAyRTIJcM40zIDb1PbGRhKDbW/qYprwPkNHhaom/bFdA6mVUIIIYQQQgghxH5GgilhYg6JSFYhYQ6dUn2RFJLrTPvq6QDKHK6kQ6gB4ZMyVBg1KJRK39fSZ5akKChomZZMqCho6KiQFUppyVBK0ZNVU6n7yeqtga85eZu8yQQ92SGVeRhf9jK6lqqqMlU0kV25lFlnCqIGhVbm/TLhYDpgygrBTO9XZghierUpvJJkSggxBFVVsVgsw30a4lPSdZ14PG4sW61WqqurcTgcw3hW4tPSdZ3Ozk56e3uH+1SEEEKIYSHBlEgyso3sICodUA0Om5LrlKx1mRBEGbC/+VbfW4WUEUQp2dvJVG7pukZybJ+eqohKLiuKhq4rq
W2pYxhD+oyUbXCTdKPqCIxAyhRMZSqbBveZyizvpXLKOIb5fvatEValQ7+hwioYEGQxROUUWcc03k/dXAcnhBBgs9nweDypvnbyE+JApKoqsVgMn8+HxWLhmmuuYfz48fh8vuE+NfEpqKpKWVkZv/zlL9myZctwn44QQgjxhZNgSpikq5+yg6V00JFV6ZQOQYwqKIx12eETgyukhgylUj2oGHAfU8UV5uF8yUoqJRVEJaukwBjKp6R6W6Vv0ZNN14b4JcwIhNKvPyvgGTCUz3j+TCiF6SurKopM4JS5n3079HA/87HM55YdSg1VOTVwiJ8QQgyUk5NDIBAgHA4P96mIf0NpaSkWi4WysjImTZrEFVdcQTQaHe7TEp/S4sWLOfvss7npppuG+1SEEEKIL5wEU2KAdJUQyWFwWWETRvXUwL5R6YBmqBAqq9pnyKF6yoB1qSojYyhedhgFpPpJpfdTTbeZl6EoerKKKl0lNWgYn3n37KAnu3IqUxWV7jmlp3tJGUP4Ml9ZFVIDK6b2qXJq6NBq8DZSgZm5WousY0o4JYQYSFEUNE37+B3Ffi39HtpsNvr7+yWUOsB1dXXhdruH+zSEEEKIYaF+/C7iYDEoECGddZjXZVcVMWj/vYQrRnAy1OMGrDPWDwh8zJVJpibkoDOixI3LqabWaVgtYLUoWK2KEa7pqV5QiqJjsyooqnlonobFomC1YARPuq6hoGO1KlhUyAql0oGQOQAyzs1cUaVlLw+sqBoyhCITRpn2S20YXDllVEklt2dVTskwHSGEEGK/J8NqhRBCHMykYkpk05OFRYObmw9cpw+eeS+rIgrTPgOqoVL7G8MASd9P/QV/iJn40oFLchhhqjpK17HZ4NRjajnusNE0tQV4/JXt9HgjfOfCGUah1OMv17N9d7LvRq7LxlknjGNUZR7tXX089nI9Xn+EqeOKOOWoUdisKsveb2b56mYsFpWvHDWaaRNKCAQjPPriFppafZmQa+AMfZhCpqECub0FcAMro7LCKFMllD54H3OVVGYdmXWf6zeLEEJ8tqxWK4qiEI/HP/IXdbvdjsPhIBAIfIFnJ4ZitVqzGrF/Gjabzeib9VlW81mtVjRNQ9M04zxVNfk32fS69K2u68Risc/suYUQApJVyvsSPO/rfkJ8WUnFlDAxDy0bXBVlrvbJDlTIrgQaGL4MNXTN3EB84L5G4GNuLJ4eIpfu65RcrirLZf6MEazf2sF761qIReO4HBacDpV7n9rI3x6vY3ezn/TQu5MXjcLlsPCXRzcQjsQ4+/ixFOXb+eppE3hl+W4eeX4rpx0zhlGVucw/pJwptUXc/eg6tu7s4dKzpmGzKph7S2UqqMznPGCGPiPIMt0fcL2GDKMgE0SZ/y8VOqX3yX5c5n00N3AXQoj9ndPp5IYbbuSaH/2IK75zJbm5uSiKgtWa/BuaoiioqorVauXb376Ck046ydiupP4Soapq1pcltc18jIHHs9lsxjlYLBYjuJCZCz+ey+XiuuuuM64pJK9hejl9Dc3XOB1Cpd8zp9PJnXfeya233srvfvc7xo0bh6Io2Gw2Yx+LxYLdbjeWrVZr1nuVXjZTFIXrr7+es846i/z8fG644QbsdjsXXHAB5557Lg6Hg+uuu45DDjmEe++9l1tvvZUf/OAHMpxOiIOQ3W5nzpw51NbWDtpWW1vL3Llzsdvtn/i4TqeTa6+9loULF37kfjabjSuvvJKTTjopa73FYmHWrFlMmTLF+PkHyZ+tM2bMYMqUKcbPQbOysjIWL17MMcccQ01NzZD7CLG/kYopkZGuyjHNyKcr5ln6jJ2SeYeSPStfVl8pc7XQgO2Kkr1NT/eZMldVka6Y0lItqNSsdXqqr5TX3088rjFtfCm7Gn20d4UoLXZTVODk62dMoaunn8df2kYkGsdqUZg2oYj7n91CW2eQ197dw1UXz2TcqHxCfTHWbelA0zS27Oxh0thCams8vLOmieY2P4FgmGMXjsST56C9K2YKoMwB1cBKpvRrGWJWvr1VUA1RFTUo6BsQFGYCRbK2mUqnhBBiv5cONG6//fd89atfY+q0aYwfN56S0hJWvrsSl8vJzFmz2LZ1KwsWLuCZp5/hiCMWMW/+PEKhEI89+igXXnQR8Xic7du2M3v2bNw5blpaWqisrOKev/+NkSNHsvCww9i1cxfr16/jrLPPxm6z89LLL9HV2ckZZ55JuD/MU089xemnn47L7eKFF15g29atw3159ksWi4Vx48YZv/RUV1fz3e9+F4fDwUMPPcT48eOZOXMmRUVF/OIXv+CII45gxowZWK1WHn/8cVasWIHFYsHpdHLjjTdy6qmncvzxxzNq1ChOPfVUdu7cyb/+9S/+8z//E0VReOONN9i+fTvf+MY38Hq99PT08PLLL/ONb3yDWCzG73//e7q6ugAYOXIkNTU1VFdX89JLL1FYWMiECRM4/vjj0XWdLVu2GOHnrl27uPnmm/mP//gPzjrrLO6///7hvKxCiC9YaWkp999/P36/nzPOOIOWlhYAKisreeihh8jPz2fx4sU0Nzd/ouPa7Xa+/e1v09fXx7vvvrvX/Ww2GxdffDFLly7lpZdeytp25plncsEFF7BkyRI2b94MwIIFC3jwwQe56aabjHVps2bN4gc/+AGrV6/GYrFw6aWX8tprr3Hfffd9onMX4osm8akwGEPRPqIaaq/bjJAEhgpRzLfmKqjMrHZa9uMGVhGl9tFNFVO6rtPj6+PWv65g++5ujp5fwwVfmURrR4Bf/20V9z65AYdd5ci5VVlVTBYVdE1LRluaTiKhoaoYx1RVSCQ0NF3HoipGtRWQml7d3PNKG3COg6vLsvpJpY6lD9xv4O1eqs2S2aEp/BqickofsE6yKSHEgaK8vJxvXn45+QX5JBIJ5sydS2tLK8ceeyzl5RX09vSy9M032bZ1G8uWvcXRxxzD3+6+m57uHubMmcu4ceN4+aWXcLldbNu2jU2bNtHd3c2G9euYMnUqNpuNjvYOFh15JEVFxfi8Pp555hnmzZ3Hyad8hRdfeIH77vsn48ePY+TIkfi8Po468qjhviwHjLPPPps333yTP/7xj1x88cVUV1fz5ptvUldXx4IFCzjuuOO45ZZb6OrqoqioyHhcVVUVN998M6eddhqvvPIKDoeD5uZmTjjhBMrLy0kkEtx5550cc8wxnHnmmTz++OO89NJLjBkzhgsuuIB4PE5+fj4LFiwwjnnOOefQ3d1trF+6dClf//rX2bNnD52dnXz1q19l6dKlaJpGPB4nGAyyYcMGqqurh+PSCSGGkaIoOJ1OZs6cyTXXXIPVasVqtfKjH/2ImTNn4nK5jIolRVEYOXIkRx99NPPmzSMvLy/rWIWFhRxxxBHMmzeP3NzcQc9TU1PD0UcfzaGHHorT6fzI80okEvzxj3/E5/Px//7f/8PlcuHxePjv//5v1q9fz0MPPWT6/StZSXXuuefyz3/+kzvuuIPf/e53fO9732Pp0qUAOBwOCgsLcTqdTJkyhfHjx2dVtAK43W4mT57M2LFjs6ph
ITmr75QpU5g4cWJWBVlRURHTpk1jxIgRUp0lPjWpmBImeibnGFjdtK+VUuYwxXwMI0TR0FFSM+ZlekgN6jGFhoKS2j9NTfajSvWYUtCoqSxk4awa4nGN7Q09lBS6GTkij0m1xWzd2YPNqhKJxpk4xkMgFOODujZOOWoMkWic4w8fRd22Lrbu6ubM48dxxJwqvP4w40Z6eH7pdnp9/Zx67Dh2N3uZMamMlo4APb4+I5TSSYVS2lBD+DTT8MMBlU5GQKcxMHga6jb58L0MzRuycko3tkkoJYQ4kHR2dnLffffh7e3F4/EQCATo6+tj2bK3qBgxgq3bthKNREhoGuFwmL6+EPMXLGDkqJG8/vrrdHd309LSwpQpUwiE+rBFbOiahqbp5ObmcvjhR/Dqq68wc+ZMFAWCwSD+gB8dnfa2dg49dA7VNTX09vYSiUbo6elh584dw31Z9mtFRUWcccYZBINB2tvbWbhwISNHjqSxsRFN0/B6vfT29hKLxaivr+fqq69m6tSpfPjhh8Yxmpqa+MUvfsFPf/pTXC4X5557Lk8++SSHHXYYiqLg9/vxer0kEglWrVrFhRdeSDgcBmDXrl2MHz+ejRs3GsdM/5L0/PPPs337dk477TRuvvlmfvazn3H99dfjcrm48cYb+dWvfsWoUaMYP348F110Ecceeyx33HHHMFxFIcRw0zSN119/nQsvvJAXX3wRi8XC+eefz6uvvsrkyZOBZJXoxRdfzHXXXQckg54tW7Zw1VVXsWPHDmpra/nLX/7C+PHj6ejoYM+ePeTn5wPJUOqUU07hpptuIhKJkJ+fz4oVK/jhD3/4kX36Wltb+X//7/9x7733cu6551JeXs64ceOMn7sDRaNRxo0bx7vvvkt/fz+BQMDoxzht2jQuuOACo59eXl4e0WiUm266iZ6eHkaPHs0Pf/hDenp6yMvLw+fz8Zvf/Ia+vj6mT5/Od7/7Xbq6uigsLCQajfLf//3fVFZWcu2117Jnzx48Hg9Lly7l6aef/izfGnGQkGBKZKSHgpmH2aEnR/ENCKH0AY3MkyPxPiKsQjOamptDKV1JDuNTssKszFC9TBiVOgagoBrhVFd3EH8gzOGzq2lo9vHQc3WEwjFKPE4OOXkija0Blr2/h7NPnEhrR5CXl+3CoiqcfcJ4drf4eO6NHYT6o/z1kXWccvRYbBaV+5+to7HVT0u7n1y3lTOOn0BXTx9/f2wNsVg8E0qZKr2yq81M/bMGVEuZq6vSlU1ZPaeGuN3bsL+sZudZVWqmqipJpoQQB4hwOMyDDz5AV2cnuq7T2dnJ3+6+m9GjR7N161Z27dpFf38/iUSCxx97lGAwyN/uvpsZM2fyr+f+RX39dvr7+ojFYqxevZpEIoGqqsZfk1VVZdOmjRQXF3PvvffS2LiH5uZmurq7efHFF2lqamL2rFnEYjHWr19PKBSipKSEhoaG4b0w+7H+/n7uvPNOcnNz0TSN5557jubmZnJycnjwwQeprKyku7ubpqYm4vE40WiUsWPH4vV62bEjGfiFw2HuuOMOmpub+dWvfoXNZuPXv/411dXV/PrXv2br1q34fD56enr429/+RiwWY/v27YwYMYK6ujoeffRRDj/8cHRdp6OjA0j+gnnrrbeyadMmVFVlw4YN9PT0cNVVV7F+/XpUVaWrqwuv14uu6zz88MPGL1mNjY3DeUmFEMNE13Weeuop2tvbueWWW1AUheeff55169YZwdT06dP5r//6L+666y7+8Y9/MGLECP70pz/x85//nMsuu4yrr76a8vJylixZQlNTE5dddhmnnHIKACUlJfziF7/glVde4X//938ZPXo0jzzyCKeffjpPPvnkR57bq6++ygMPPMAvfvELbDYbN998M+vXrx+0n6Zp3HPPPVx//fUsWrSI9evXs2zZMtauXUs0GsVut3PkkUdyxRVX8OGHHxq9rS699FJ+//vf873vfY9nnnmGpUuXYrVaueGGGzjhhBN4/fXXueaaa7j99tv58MMPsVqtzJw5k1gsxmGHHcb777/Pn/70J+x2+6AqKyH2lXznCJN0EKUbLZ/MAcreQih9iDAKPRM46QyumgIlO7QCUs2kAFMlVSqQUgAU1dg/HU6F+qO8+NY2drd4aWoLEAhFAYV7n1yHqqpoOlhUhd3NPrbs7KY/HOWJl7agqgqalglt6nd3c/s/upNXIVX1pGnwyts7eHV5PZqWqXBKB0mYqqMYsG2vw/EGVEsZwdJHVk5l9h26SXom6EoeavDzCSHE/i6RSAzqldHQsIuGhl2D9t2yZQsAXq+Xt958c9D6tra2IZ9j4Pqenh4AdqZCkpUrVxrbNm/a9AlfwcEnkUjw2muvZa17++23jfvbt28HMtdZ13W8Xi/PP/88O3fuNI6xevVqAHbv3m08duPGjcb99OPXrVtHTk4OK1asIBaLUVdXRywW44033sg6B6/Xi9frBZK/qH3wwQdA9vv73nvvAeDz+Xj22Wc/5RUQQnyZ9Pf388tf/pJ//etfxGIxbr75Zo499lhj+/z580kkEvztb3+jt7eXzs5O7r//fq666irKy8tZuHAh//rXv/jwww/RdZ177rmH73//+0Cyifro0aMZNWoUN954ozEZx/z583nmmWc+8rzi8Ti//vWvOeWUU6ivr+cf//hH1hA+s927d/ODH/yAkSNHcuihh/Ktb32LHTt2cMsttwDJn4MbNmxA13Wi0ShPPfUUt9xyCx6Ph6lTp7Jt2zYqKyuBZPP2OXPmUF9fTyKRMF5XLBbj/fffB+Cdd97hJz/5CVVVVbz88svGz1YhPikJpoTBKLwZVA2VGRo25LahbsEInhTFFKQMbISOaT0a2S2RFBQlWRmlo4OmoaiQbISumSqnFDZua0dRkmOak9OtKiSS4w5J6PDmew2Yf34nEoNfvZ5VMYYRyGmmCiWGDKVMFVFGFdXA4X0DZuIzDetLvgZTuDToNn0uGPuZh+wNqqwyb/u3vyuEEEKIz0Zrayutra3/1jFCoRCrVq36jM5ICCEydF2nvr6eG2+8kWg0OqhiNhKJYLVayc3Npbe3F0VRyMnJIRaLEY1GCYfD5Ofno6oqiUQCt9ttVBBFo1FisRjbtm0zgvfVq1eza9cuNE0beCqDtLe309zczI4dOwiFQnvdT1EUotEo9fX11NfX8+qrr/KHP/yBkpISgKyZbCFZTRyPx9E0jVgsRktLC36/H4A9e/bQ3t5uzIib/B0r+7eLzZs3c+WVVzJz5kzOOuss5s6dy29/+9t9ek1CmEl3MpGx1yqcoSt59jrsbIhqn+zAJBPsZB1nUJWRZlqXDnY003pz+DNwSJxm2q5nNS3Pako+oKG6OWRKHzvrdshQai8z9Jlfs5792jPVUvqA5/yoyqmhr7s5iErvZ1RO7eWvKUIIIYTYf5h/URRCDK+nnnqK559/ftD6t956i2AwyM0338yiRYs4//zz+eY3v8lTTz1FV1cXzz33HEuWLOHSSy9l0aJF/PznPzcmeti+fTtr1qxh0qRJbN++ncbGRiZPnkxzc/Neq58
+Kbvdzo9+9CMOO+ww3G43drudiooKEomEEWYtWLCAhQsX4nA4yM3N5fzzz2fZsmX4fD7WrVuHy+Xivffe47333iMUChEMBtmzZw+JRIJjjz0Wh8OB2+3m+OOPp6SkhEmTJuFyuVi5ciV/+tOfmDBhwqCG6kLsC6mYEibpCieyq5+MSqnkPkal0xCVUrrRSyrTUyorpMoarqehK5n+UkaQo+im50k2P7dYrFitKqAQi6fXK4CKqijY7SqRaAIFBR0l2TiddESjYLOooCjE4xqqBXRNz8psdAYEOzpkKrgy5+WwqoQjiUGhFOnwLGt5QHA3MDwzB3fm62QOz7JCqHQwlnkfjLBqwP6YrqlEU0IIM03TsFqtRKPR4T4V8SkpimL0zwqHw+Tl5ZGXl/eRf0UX+7cxY8bQ29s73KchxEEpkUjQ3Nxs/Aw1B0XBYJDm5mZjnyuuuIKf/OQn/OUvfyEajfLoo49y2223oWkaf/7znykpKeHaa68lHA7zzDPPsHbtWqMB+TXXXMPPf/5z7rnnHuLxOMuWLSMYDKLrOm1tbR/5M0DXddrb2+nu7t7rPrFYjFdeeYVLL72Uyy67jL6+PpxOJ3/605+MRukrV67kmGOO4ZxzzsHtdlNfX8+DDz6IpmnccccdXHXVVSxcuBBIVqjefvvthEIhfvWrX/GDH/yAJUuWYLFY2LVrFytWrKCmpobvf//7RCIRPB4PDz30EJFI5LN4W8RBRtE/q4hWHHD8fj8FBQX4fD6cThcnnX4pm7buQFEsKKqKolhQVQuKakFR1Mz6rOXUfVVFzVpO76MaxzLfx7if3qaYltXkdtPXcYsmUVNZSEdXkGWrduFy2vEGwjjtNspK8pg6oYLX3qknP9eJNxDB7bKjaTp2m5WEplMzIh+b1cL6Le0cfuhIdjb20h+Ooes6/mCEvBwHuq7RH4mTn5M8tsthxWpR6AtHWbywlpVrdzOq2sPm+nbC4Sh2mwVVBQUdX6Av+SGWVZmVDpgSybBp4DZNQ9cTQ9wfuC6ReayWWdaM9ebbhOnxyduH77udoxfN/8j3Pz1jiBDiwJX+N/1xLBYLhYWFUqFxgOvv7ycYDKKqKpdccgmLFy+WsPEApSgKsViM//mf/6GpqekzOaZ8tgux75+LiqLgcrmIRqODZsizWq3Y7Xb6+/uNwMrhcODxeAiHwwSDQRKmHiEWiwWPx4OiKPT29uJwOIjFYsZMeHa7HY/HQywWIxAIGM/ncrlIJBIf+XPc6XSi6/rHBj9WqxWXy4XL5cLv9xuzmC5cuJBTTjmFX/ziFxQWFhKLxfD5fFnnb7Va8Xg8aJqG3+/Puh7pc49EIlmv2+l0kp+fb3wuSbwgBtqXzySpmBIGY2hZujpKSQ83SzUyN1UyMfC+zoAm6KmKKXNlj6kBelYzdDT0ZPMo0kP+Ms3Pk+OT83OdRKNxOroCnHTURAryXexq7GVkZSGhvij5eU5OO3YS+XlOduzuYf6sGt56bxflJXnUjizi/fXNJBLJcKcgz05FSQ5HzhtFqD/GitV7OGRSOY2tPkoK3eTnOmhu9zNhTDHBUITtDd3MnFLBrqYuRlV5GFXpYVdjN+UludRUJn9wP/78Grp6AkaNUmZIoml44KDKKXO11BCVU0YFFQMqp0zv1aDb1Hs5cCihEEKkJBIJuru7JZg6wKX7d2iaxr333suTTz4psyEdwILBoFQZCDFMdF2nr69vyG3xeHxQWBWJRGhvbx9y//RnbNrA40ajUWMGUbP+/v6PPc90wPRx4vG4UaVl5vP52LVrF7FYbK/nH4/H6erqGnLb3s49HA7v87kJsTfyXzDCJNP3SDEN4wMyM+iZQhDFHDYpGuhK5tYcmKBnr1c0U1aiZBqpG2FUetgfqQbnenK2pu3d7GnxMnfGSLz+MNFoDJtVZU9rL1NzKygtzqHb20d/OEp3b4imNh/TJ1agquCwq4T6E0YVk6Lo7NzTQzgSZ0RZLv5gmKZWH5NrS+j199PXF8EXCPP+ukZqRhTQ1hkkEAhjs6hs2dHOSUdP4plXNjB1fDk7G7tSPaxMQ/vQUn2udDL9p9JNzk1D/chsH9hryzzsj6wvLXt54HA/4z0yDVEUQggTRVGwWCzDfRriU0r3TjQv+3y+YTwjIYQQ+7vNmzezdetWqWgS+yUJpkSGjlHxpOs6mHpNDZ6BL10hZeolZe5Dld6WCp4GVk0ZodagmfiST4uiGj2YFOCDdQ30+sP09cd44sUPqR1VypYdHbR1+CkpymXZezvoC8cYXV3Etl1ddPWG6O4Jsm5TMwlNp7UjQDyRfO4PN7XQH47R1uEnoen09UcZXV2Iw67y1Mt1jB2ZPEZzu4/u3iBeX4jmtl4Smsa7a3bR0u4jEo2yeXsbupbAalHw+vuyA6dBPaSGaNA+qLeUaf+saqdMs/RkAJXpHpV5HnNllCngk88dIcQANpsNj8eTVbovDizpWZS8Xu9wn4oQQogDhK7r8tkv9lsSTAlDVgPwAaGTPqDJuWJUSJG1HsxBVfKomVAq+Rg9NZRPNwUwijlMST13epiJrus0tvagkOw91d7pp70rkBy77etjx55ukkMNFdo6A0AqKELh/fWNqW2QOiGaWr0AZIpsddZu7DPut3f5QYeungDoOl5/Hy3tXtKVSeg6dVta0NGp29qcFTLttfm5OWgyD2Ecqlpq0P2BQ/fSxyf7mFnVVpieRwghMnJycggEAlJ2f4ArLS3FYrHILxlCCCGEOOBJMCUy0gGLEUIxRKVUuqKK7KqoIfpIJbdBdtWUlsmykvcyoY2Snu1PQ0clOZxPRVE0dF3NPDY1656OmlpOhli6roCxDeN+8nxTwwj3/uKzh76lqo/01GvQU8PwdFMABJqxPHBGPn3AcmadeZuedcyhZ+gzBVGpdXr6vWJAqDUwvEofW8IpIYSJoihZw8DEgUneQyGEEEJ8WUgwJUzSoQZGkDS4UsocoqR7Rg0IrsxVUwPCKp0B+5EeumcaiYY6oPm5imI0SE8FYel1RhClGI9L3ley7meHM+aGv6b16TDKNCxOzwqEMgFTZmhdprn5wAAqezkdRGX3hzKHU9nVUqYhf2SeSzeCuwEVU6ZbY7ifhFJCCCGEEEIIIfZzEkwJQzrMyAzTGxg4mSpwBg7hG6KPVOZ+pteUYg5SBgRVA2fig3Tzc1M4hZK1ziy9PV1Tlb6fHnKomPY0v2ojujH1aNJNoY8+8Na4FuZQKlMhpZuH6ZkrooYaqqdrqeuzl2qprPVkbzd6T5F13MFhlRBC7B8URcFmswEQi8X4pA1YVTX5c1/TNKxWqzFTUno670gksk9D23Jycujv7zeqjux2O7quG9N5m8/X6XTu02xJQgghhBDi05FgSmQYgQtZgZMxY15WCJXd9FyFAT2lBlZNDZ6JLxkgmUIf00x86cosHc1Y1vX0ITUjnEoP4csEUYpRK2UeypcsMEoHUoN/EVIU0MwVRllD45LPmTy3TOWSkg7WBgzNMwdQ6dApM4zPHMxlgq3sWfWyG6RnVVHpmZ5T2c8xuHIqE5p9Nt8eQg
jx7xo5ciRXfvd79Pb0EAgGuOfvf6e/vx+r1WqEQjabjUQigaZpxsyB6bBpypSpjBk7hhdfeIGrr/4hjzzyMKqqMn/+AlxuF8vfXs7u3Q3E43EURck6rsViQVEUEokEF3/9Ep584gk6OtopKSnhP6+9lp7uHnp7e3nkkYcJBoNAMsA6dvFinv/Xv4xzSH7u6FnHFkIIIYQQn54EU8JkQPWTeTjfENvKSwqZPm0Cy99dx5zZU9B1hVVrtmCzWXC7XfT1R3E67OgoaDooikq6Wik318mcmRP4sK6B2TNqeXvlFtwuJ6H+KC6XjQljK9m5pxNQ0HUYN6aCuq0tzJhSQ0NTDzUjiti4rZX5s8aQ43bQ6+sjntDp9fVRkOciFtfIzXFQUphLtzfE+s0taJqG2+0gGo1jsahYLCr9/VFy3HYmjC1lzYYmJowtJZHQUJRkWLWrsRuLqtAfjuJ22ZhUW866TY1YLQrlJfl4/SH6+6OMHVVCjzcIukZrh5eJY6uo27IHRYEct5O+vn7sNiu6rhEOR3A5HUQiUcKRCAvmTMKTn0Nbezcbt+4CXSMajeJ0O+jr78eiWrGo0Nffj8vtoK+vn1gsPrjCigFBlrkqTQgh9gNOp4umpkbu+fvf+f73r2L06NHMnn0oJaUlrFm9mp27dnH2WWdjsVj4xz/u5eKLv04kGuGB++8nHA7T2trCWWefzaaNG5k8ZQpz587DYrXQ3NLM7NmzOe/887Db7Nx1110cdfRR1NTUsGXLFtasXs35F1yAxWLhySeeJCcnB4slWX1ls9nx9nq5/fbfc9xxx3H6kiX09vQyafIkli9fTklxMUcsWkRfKEQkGqWstAxPoYeqqio2bdrEKy+/PMxXVQghhBDiwCbBlDBkVfSYK6WGqJhSgBnTx1NaUkhxUT7FRQVoWnL7gjmTKSkuxOsPUVZSSCKhE+qPUFiQR2dPgO07W9A0nXGjKxhRXoimw7xZ47DZrMTiGgV5bkpLCnA4bFSWF9LdG0JVVXRNo6c3yFcWT+fVZZvQdY3iQjfd3j6KPG4SCZ2K0jxcTjuh/iiBYJj1W5qoHVlKYYELr7+f+TNrAIUclx1N12nr8ON226gsK2D1hj3kuu2MG1OKltBoafdSWZ4POoT6I1gtCtUVhRQWuHDYrfT0BunrD9PdEyYvx0HtqFKef20NqgLlpfnUbdE4ZPIo8nKd6LpGWXE+iUSCji4vmqaRl+Pk+Vffo7gwj2Cwj4ryQnQ9weSJI9m5q5nionxCoT6cDhuKAj5fgNISD63tnbzx5ntDVFXBkBVTQgixH5k9+1AqK6vo7OwgHk8wf8F8lr+9nEWLjqSrqwuvz8v06YcweswYRo8ZzW9vu4158+czf/4C/vmPf+Dt7eW000/nscceZc6cOYDCKy+/zMyZM3nh+edZsGAhs2bN4phjjuGtN9/ksMMOo6CggLy8PLy9XubPn5c1oBuSQwPD4TA7duzghBNPxG6z07Crge3btnHC8Sfw6quvcs455xKLx3j33XdZcsYZvPXmmxx++OG8uXQp0Wh0OC6lEEIIIcSXgvrxu4iDy+DhaOZm2unl3FwXRUUFdPf4mDxxDPF4nHg8gY5GPJ7gww3bcTsdOJ12fIEg/f1hEprGuDEVdHX70DSNDzfuZOk7G+ju9uOwW4hEorgcVjRdIxKJYrdZUBQI9YVJJBLoukZzaw/tHT46unyga/SHo4TDUbz+PprbenC7bPT6QoT6IiQ0jbEji3G7bARDYTz5TlxOG/m5DgKhfnbt6SQ3x04smiAaiycrlWIx3v1gByvX7CQeT2BRFeKJBC6HlUg0Tiwex+W00R+OEo3F0LTkeW3f2UpjSxeRSAxd1ygsyGFibSV5uU7jtbS299Da0YPVouKwW7Hbrei6TjgcJdQfJhDsY/TIChQUXE476+vqURQFRVFwu5w4nXasVguBQIiBARSpPlcDK6dkHJ8QYn/zwQfvc8/f/4bL6aKvL4S310t/fz9vv72MBQsXEgwGCfj9WFQLPd09tLW1sfSNN7j1/26hqamRVe+vYvbs2by7YgWRSIRQKIjf7yeRSBAMhQgEA4QjYbo6u4jGYrz99ts07GogEU/Q3t7GmjVrBtWSjqgcwVe+cirnnHMub735JvF4nPr67cTjcXR0WlpaUC0WcnNy2bZ1K91dXUSjUd5etkyG8wkhhBBC/JsUXUoqDlp+v5+CggJ8Ph9Op4vjTj6fjZu3o6gWVFVFUSwoioqiDr612WxYrTZiMY2cXDegoCgq/eEYLpeLhKZjs9lwOZ2gqITDMSyW5OOCfVGs1uT9aEzD7XKiAZ78XHq8ITwFOYBCMBTFU5BDsC+KoiiEI3EURcXlshMOx0FRcLscRKLx5NC4aByH3UZCS35LWywq+Xku/IEw4UgMVVUpLHATiyWIxRMkEsm+Ufl5TiLROIFAGLvdkgrYSAVIFiyqgj/YT2GBm3A4SjyewOW0EgiFicfixBNxVEXBalEIR6KATklhLoqi4/WHyM910usNJPtjaRoF+S5mTB3D9h1N1O9swumwEY5EcDltWC0KFlUhGOojGo1is6qMr62msqKEpctW4XLZCQSChEJ96HoCXdOGvtU1dC0BusZjD/2FY4467CPf//z8/C/q204I8TlJ/5v+OIWFhYRCoWGp8snJyaWkpJjdu3czfvx42traKCgoYPTo0WzZsoVEIsHkyZMJBAI0NjZSWlrKjh07jCblAE6nkzFjx7J50yYqKyvRdZ3W1lZGjRpFZ2cnhYWFBINBnE4n48dPYMeOetrb25kyZQput5v169dTWVlJc3MzkUgEh8PB7Nmz0XWd3bt309raSnV1NT6/n/6+PsaMGcP27duprKxEUVWam5qoqKhg3Ljx1Ndvp62t7Qu/jgDFxcV4vd59avYuDj7y2S7Evn8uFhcXU1tba/Q1FF+cSCTC1q1bCYVCQHIyE3kfhoemaZ/bH9v25TNJgqmD2OBg6rxkMKVYUNLB1MBbRTXuq6oKA9Ypijp4WVUz61NfDLVOUYa4rwCp29Ry9rpUw3MlOTBDSS2TXk4P2FCM/zc084x8pmWjoblpdr2s2fnSs/CZmpNnNUM3z9ZnWgbdqLZKhklaalvCWE5vSzdP1xIJU+iUHUANdatpCUDj8Yf+wjFHHf6R77/8x6sQB74DIZgSnx0JpsRHkc92Ifbtc7GgoICHH36YI4888gs6K2GWSCT461//yrXXXovFYqG8vByrVboNDQdd1+nu7jYmgPks7ctnkrzrwmDuLzXkranPlMvtQNfAZrcRjsTJcTsJR+O4XU7y8/PoD0fxB/pJhiqQDIWSj3fYk0l4OBInL9dJMBRBR0tWVwGRaMKYdS8ZQGnouoqClppZTyU5g19y1r38PDeBUBhdN826pyvJCqV0GKWnZwLc64sHknlWXo4DXcsEVf5gappwXcPltGO3Wej1BQeEVObgKfnY8pJ8Ort9xOPJEEtBJy/PhdcfTAZO6eF3qdn53G4HkUgETQOnw0Gu24mi6PT0+vDk5xCLRQkGQwSCQdxuB319fWhad
rNz821muJ8QQgghhBBioJKSEhYtWsTll1/O+++/P9ync9D5j//4D0499VRuuOEGbDYbFouF1tZW+aPLMCgtLcXlcn0uwdS+kGBKZEkHLVmNzgeEUoqqcMKxC/H5Q1hUlaaWTo5ffBjvrFxHUaGH0pJC+vojbN/ZTGNTB5UjSohEE+TkuCgt9tAfiTFp/Eg2bNpNTo6Tji4/o2rKSSQ0wpE4LpeT1vZeOrsD1I4uoyA/h6bWHnRdwWq14HI6sFotWCzJ6qoJYyvo6A6wdUcbo6pLaG33UlHmoaM7QH6eC0VJzqqX63ayfVc7DruNibUV9IejBPsiBINhbDaVEWUeOrv9TBhbQXdvkCkTRtDQ2MXm7S3kuh20dvSy+IjJtLT10h/Opz8cTU1prrO7qYPKcg/5eS621jdRVpzPScfOYtnKjThsFhqbO5lQW0l5mYf312yhtDiflrYuSosLCIcjKIpOXo6Lhj0t9IfDlBYnm78vmDOFD9ZuYszISrZs28kh08bzweoNzD10Ki++smzImfmymp8n31QhhBBCCCHEAIqioKoqe/bsob6+frhP56DT2dk5aOheLBbLGsIvvhiaphmjkIaDBFPCMKhiygg30us0UBRKigsYUVFKVWU5y95Zw9GL5tDc0s7sQyayYtUGotEYnd1eKspLaNjTysTxI3G7nHgK8ggE+4nFE3R1+5g2eRTxhEZNVRlr1u+gsqKYibWVWK1Wijy5LH1nI4dMGcmHdQ0snDOeji4/LqeDwoIctu1so6ykgLotTehjylDQmTJhBJ58NzWVhaiKgsNuYfzYCoKhCF5/HzarhR0N7YwbXUI4EmHKhErC4SgNjV3k57koyHMxpqYYX6CPEWX5eH0hWtt7OXzuODq6/DS3deMP9hGORHE5bVSUFVBcmMPS5RvRdY2ayiLGjamgvaOHeDzBjoY2Fhw6AZ8vyKjqErbtaGJUTSlHLpxGqC9M1YhivN4AlsJcRteUs2tPK6Djctop9ORhtSi88db7WCwqdruVWCzG9voGFh+zgC1bdySH9g2amS/7ViqmhBADaZqG1WqVoXwHMFVVk7PVys94IYQ44OXn5zN+/HgCgQA7duwYslpo/PjxWCwWtmzZMgxn+OVisVgoKiqis7PzEz+2pKSE0rIyQsEgTU1N/1aAVllZRVdXJzk5OYTDYRRFweFwEAgEKCwsxOl04c5x09HeTm9v76d+ngOFBFPCJNNHCXQUJbWsaCh6aiieruMpyONfLy7F7U42Em9ubef9NZuZMW0CPb0+Zs+YRF5eDqtWb0LXdbbX76GsrJidu1uoriynYU87kWicUF8YRVWJRuNMmzSSlrYetu5oYlR1Obv2tAEaFlVh7KhyNm1tpHZ0BQlNZ0dDGw2NnRR5chlZVczupi66vSEqyzxEonH6+qNYVAVN02hq6SYciREIhqmuLAJFp6W9lxmTa7BaFHq8QUZVF9PW6SMasxEI9eP19WGxKCQSGl09AfLznKxcvT3ZYLe9l45OH4dMGUkw2E9nl5fObi/oOlaLSme3j1g8RiKRoLm1i1BfH067lcbmTsaNHUFHZy+9Xj+eglyamjsIBIJMHD+S1vZuurp7CYcjgE4sFmPS+FpcTju7G1uIRWOUFBfy3vsfMmXSWLZu32UMHcyamW/QbH2p7UIIkRIMBiksLCQnJ2e4T0X8G/r7++UvykII8SUwd+5cjj/++ORsr2+/TXt7Ox6Phw8//JCJEycSiUS49tpreeihh1BVlcrKSlatWkVNTQ12ux1I9h3s7e0lLy+PDz74gAkTJlBQUMAHH3zA5MmTyc/P54MPPiA3N5epU6eydu1aysvLKSoqYuXKlUQikWG+Cl+cgoICliw5g7vv/isANSNHUl1VzaZNmygvL6OwqIhdO3fS39/P1KlT6e/vp75+B6FQkBNOPBFd1yktLeO1V18BwO12s3XbNsaOGYvdYaduwwaqqqrweDz4AwFampuZNm0azc3NNDY2Askw8gdXX82zzzyDrus4nU7cbhdjx9by+uuvUVtby8SJk6jfUc8Jx5/Ac889S0NDw3Bdsi+EBFMiZUCgkQqlFEVPrjJVTW3dtiurKXrDnhYUReX1t1ahKCqPPvWq0fQcRaWhsZXdTe0oisqmrbtRSDY/b2zpNJqdb9ramGp4rrBlezPphuZvvlNHZ08AUNi+s5V0Y3NFUVj23qbksQAUhZbWHhRFQQdTGaKSam+lsHVHKzrQ2R3j9eWbKC7Kpac3aFQcKYpi3E9fE7fLzor3t9HTGwBdZ/O2ZkBn6Tt1qX01o8fU2+9tzFrn96f7UCWble9oaM5ubJ76amppz2qCrusaPb1eVq/dZDQ437xlB+gamp7g2effSM64x1BBVHYFVeqdFUIIQyKRoKurKzmBhTggZX9WCSGEOJClh7Lpuk55eTnnnHMOnZ2dlJeXc+6553LrrbcSiUQIBoOce+65bNiwgYsvvpjx48ezatUq5syZw5YtWzj88MOpq6vD4XBQXFzM0UcfTWFhISeddBIffPABo0ePpqqqinfeeYdx48Zx8cUXG0Pp3njjjWG+Cl8cRVGwWJPXPDc3l9NPO50NG9Zz9jlnU1xUzNatW5k2bRoBf4Du7m4OO/xw+vr62L59O1aLFZvdhtPhIBqLMWHCBObOmUtZ2fsceuihNDQ0UFxUzKTJk1j57kqWnL6Eht0NeDweFh52GH/8wx8Ih8PMmTOXuroNzJ8/n4cffoizzjqbRCKBarGw8LDDeP211xg5ahRvvP46kyZNZsrUqRJMiYOH0TQ73VfKGNKXHMJnDPMzwist2VQcHT091C/d5Dy1j5K+j2lomalvVbLBeZKCgp5ubJ6aea+9y5ucWU9R0FPNzpON0UkdK/3I9DMoqePomXNJnyOZefl0oKvbZ9xPnpqW2ZoK40J9/YRC/ZlzNzc8NzU+Ty+bg6qPn6FvQJBkzPRnuu6mSihjVsDUTIDp4+hZ67JDKnT5a7oQYmhSbSOEEELsH7q6unjmmWeIRCIsXryY9vZ22traWL9+PWvXrqWzs5O2tjZjGLeu6/T29vLaa68xfvx4li5dSkVFBatXr2bixIlMmDCB3bt3U1BQQEtLCytWrOD4449HVVXj899ms9HR0UFra+swv/ovXnFxMYfOmUPAH0hek9TvToGAn7Vr1/CVr5yK1+dlROUICvILjO4oCS1B4449eDwerBYrU6ZMob29jZzcHBqbGtm0eRPV1dXEY3FqRtZgtVpRULBYLMYwTafTyaxZM2lsbMJqs1JSUoLNbsPb4WXnzp0sPGwhzc3NuJwuZs+ezcRJk3hz6ZvDer2+CBJMCRNTXylz0DTU7HymSp2a6nLaO3sAhYSmY1GVdGETipqMgiyWZAVV5YhSwuFYaihfJBkzpf5on5y5TmP0yHLGjh5Be6cPRVEIBMO0tPWiqMnmhJUVxeS4HWzf1YYn343dbqOzy4+OgqZrjB1VTmd3gGBfJBVE6clgK5VMqYqCpmdCKlVVkmO59eT99GtOJHRjSJ+Sir3SYZSqKthsFqoqiti5uw1d
0xlR5qGjy4emJdA0BV3TSWjJa2SxKCTiycehK8QTGhaLQjyuoypQVVVGQV4OVZUl7N7TSlmph3gsTkdnN43NreS6nQSDIXq83uzwamC1FAOCLKmXEkIIIYQQYr+1du1a1q9fT1tbG4qi8Nvf/paSkhI2bNhAW1sbiUSCf/7zn+zatYtHH32UyspK7r//fioqKvB6vTz88MO0tLTw8MMP09PTw/bt26mursZqtdLQ0IDb7aalpYWnn36aWCzG1KlTqaur43e/+x3l5eXs3r17uC/BF8rv9/Pqq6/idDrp7Ojg2WefoaqqiieeeAKPx0NXVxcvvfQiqsVCTXUNob4QnZ0dALy5dCk9PT1s3bqVWCzGa6++iqIotLe3Y7FY8PuTQ/ci4Qjjxo9j27ZtvPTSi0ybPp2Ojg5isRiKovD4448bwaHT5eLRRx4hEokQiUTYum0r0WiUF154gfyCfJ5//nmaUkMAv8wkmBIZxpA2U+BhrppK95lSUoFVKqyqHFHClMm1eAry2bGricoRZXR0eikvK8ZqsxEKhSkuKqC1vZuZ0yey8oON2Gw2gqEw5WVFFOTnEo7EWbp8HbquUzWiGF3X8fmCTJs8Gl+gj+oRxeTkuOgPJ5v1jhtTyciqEnq9IVBg1rTRhPoivP3eFkZVFzOxdgQ93iAJTScUipCT46C7J0h5aQGFnhyCoTAF+W46u/yUlxbw9sot+IP9nHbCLOw2K8FQP5u2NTFudAX9kSj9fRE0XWPjlkaKCnOYMqGalrYeijw5FHtyyM930dMTYFRNKRWlHnRdp6vby1sr1jN5/EiqKotpb+9h7OgKdF2nfmcTo0dWULd5Bzt3NTGivJh1G7ZRU1WK1+dnzsyJrF2/hVCojxnTJpCb4+L1N981hU6ZCilzw3OMKqqBvaeEEEIIIYQQ+xtzE25d11m/fr2x7PV6Adi8eTMAGzduZOPGjUAyYAHYunUrANu2bTMe1zhEkBEIBABoamoCoKOjg7q6us/qZRwwYrEY769albUuHc75fMkRNQ0NDTgcDl555WUCgYCxvrm5GUj2eQSGbKDu83mJRCJs3FhHW1sbsViM91auNLZHo9Gs50sfOy39Pm3ceHC9N9JgQhjSHYmGnOXNvH7AcLL8vFwcdhtrPtzE6JGVRKMxenp9VI4oRdc0nE4ba9ZtxeV00NLWidViwesLctjcqUQiUYqL8lEUneLCXNKhi6ZpqJZkz6eCPDd5eS4i0Sj5uS7ycpxEIlF6egMU5Ltwu+wUFriB5NBBXddpbuvBU+CmuqKQMSNL6OkNcOSCiUQiUTz5ruQgP01j+87WZAO74jxAJxTqp62jl631LUwaV4nTaaW8JJ+CfDfFhblEolGKC3Pp6PIRCPbhsNuoKPNQXlJAUWEuToeNus0NtLZ1Y7dbURUoKymgpbWLstIC/IEQ7Z09jB9bhaKA1ZL8J1iQn8OihTPo9fqJRKJoqYCpo6ubmqpyQqEQ/f39mWqprN5S2hDrMsMEhRBCCCGEEELsu0gkQmNjoxEOfhI9PT00NjYSi8U++xP7kpKKKZFh7m2U7illDjvMlVKm9R+sqUO1WOjri9Dd46O4qJBoLM4LL7+NzW4nGOynPxyl1xugqDAff6CfUF+YF0P9dPX6aW3vIR7XSCSSAcumLbuprChGSyR4f+1WQKGoMJ9Dpo5mw6bd9Pr6yM1x4XDY2LytCRTYubudSCROPJFg7YadlBYX8O7728jNdeK02+gLR2nt6GX9pt109/rpD8eIxxOE+iJs3NpIa3sv8Xic9z/cgaZpRCIxunv9FBa4CYdjjBtbwY76VrREgs3bGhldU0ow1M/7H26jIM+Nrmv4A31oWoJwJEpzaycWVSGeSLBqzWaqRpTwznt1WK0qiXicDzWNirJCGpvbicdjfLBmE0WFedisFkBn5ar1qKmZBV9+/R1CoVB2jyl9iABxiDBRhvIJIYQQQgjx0TITJwkhhoOiS0nFQcvv91NQUIDP58PpdHLU4tPZuGlrZsa91Mx66dn3kjPome+nvlRL1jLG49TB641tSmYdSnIGP9L7pmfeU41bi6pis9uIRhPJk1cUoyl6+jY9A5+S7h6V+oBJNpxLHjeeSJDdAn3gTXrBqB8DXcdutxKNxlLNzUlVImWamA9uhG4aUjfgduhm6KkvTRu8PmsmvwSYljUtkdlfS25P3k8Y+zz9+D859pgjP/L9z8/P/0y/t4QQX7z0v2khhJDPdiH27XOxsrKSlStX0tTURHd39xd0ZiKttraWlpYWTj75ZCwWC+Xl5UQiERn1MQwcDgfBYJCenp7P/Nj78pkkFVPCxFwJNWAmPl3H4bARjSUyfY7QU7uYGqKT7FGlKgpWq4VkEVR6Jj4t1YRcIzmKNDX0TgF0HUXRUnmQmppPT0vOwKdrJHSdRFhLhlWYZ/BLz7WnpNYqOBxWwpE4qgJWq4VYLEE8bpr9T1FQFAVNSzYjz8xmlw6i0tcCI3SKRKKgJ5uYq0ryMdFoInVdrITDUUDHbktP95ogFouT0AaHUqBjs6lEI3F0XcdqUdF1iMUSOB02NC1BPBYnkdCw2a1EwpHUcD3z7H/JL7vNSiIRJxZLZIVhWTP0CSGEEEIIIQZpa2vjyiuvZMmSJdhstuE+nYPOrl27uOeee4jFYsTjcXp6enA6ncN9WgelQCBg9C0bDhJMiQzdFGakwiJSs/MpikJt7Si83gCFHg8Ne1qorCynry+CzW4DVOIJjZLiIvyBEIfOnMq2HXsIh6N0dfvIyc2hID+XUChCd6+fosI8ios8dHR5cbmcOOw2+vqjOJ0O2ju9FHryCEdiOOw2Soo9tHX0UlLsIRjqx+GwE4nG6fWGcNht1FSW0tUTIB5P4HI5mDtzHDsa2uno8lFclE84EkXXIRgKU1HuwWqxUJDv5v219Rx6yFi8vhBefx9Oh5Wmlm5QdKorSwiG+kkkEiQSGna7lRy3g3A4SmFBDk6njR5vAK8vyLxZE2jY00ZbRw9VI4opKsilq8fHmFHlrP5wK73eAE6HjZ5eH+g6ZaUeTlw8n4cef5nCglwmjKuhrb2LeDzOlIlj2Lq9geKifPz+ANFolJbWdoqLPDQ1tzCiopT+vj76+vrIy8shN8eFz+dj167dg/pLGf3ChvWbSgghhBBCiP2Tpmk8//zzvPjii8N9KgetRCI5IkbXdQKBgNH8WxxcJJgShvRwtHQYlVyZrF3SdZ0xo6rJnZqHzx9gwvgxhPrCtLZ1cvhhc4hEYiQSGj29flRVxWazUlZaiM1qY8qksVgsVsrLiunrC/PU88uYd+hkOru8TJsylvy8HCLRGAoKW+ubaOvoYfrk0dhsVlwuB06Hne7eUhx2G4WFeTgddl56YzWgUVKYx+xDxrJzdxvhcIyaqlIKCtzk5jiorBiNy2mnqDAZckWjMbbUN+NwOVBIfhDZ7RasVpVZ00fT1xchHo/T3Rtg0rhKCj3JgEnXobvHz9RJI2lp7aavP4LVolJR5sHpsGGzWejodDB31oTkUDySwZbLYcdqtXDE/GlEYzGWvr0G0Jk
zcxKJRIJDptZSUVZMJBph8oTRdHb1UlSYT3VlGRaLgs/no3ZMDWNHV9He0cXY0VW4XQ6sVgsNuxuZcchkGhr2EAgEhu4zpZtDKiGEEEIIIYSZoigUFxeTk5MjfaaGgaZpQ85MJw4+EkyJjFRvpHQYZVRNpYbc+QNBgsE+9jS2UlNTRUlJMW53EJ8vQE+vj1gsQWNTG+XlpeTkuIlFY+za1cwxR83jzeWrWbRwNnua2pIz7qkKIypK6OrqJRaPEw7H6OuPUL+rCS2hsbupjYVzp7JmXT1TJo2mp9fP6JHl9PdHaGvvIcftIMftoK8/TDQ128GE2kriiQT9fRFKi/Po6PZjsSj4AyF6vSFisTjVI4ppaumisCAHBZ3OLh/5eS4S8QROp5VgqN8Im/r6wrS09VCQ5yY/14nPHyIWixHq68eTn4PVohKJRInHVcpKCujo7MVms6DrmhF8RSNRSks8bNhUT3mph16vn77+fjZt3cHokSOIx+Pk5rhoaemguKiASCRKIBgiHA6Tl5eDzx9AUaC0pJCm5jbi8Sh5uTlUlJfi9foIh8NEIpG9zNCXDMmEEGKgT/sf39LzYf/z7/wiJe+nEOJgZ7FYyM3Npbe3l2g0Otync9DJyckhLy9Pgikhzc8PZgObnx95zMnUbdyy12bnqsVqNCxXFRWrzYZOsjm5qlrQdIxG5harLVWoo2CxWNA0sNpsgIKmwehRlXR2+4hEYiiqiqqqyUwl1ch88oRReH0hOrp8WK1WEpqO1WJJFgABeblu+vqjJDQNq8WaurWg6ToKCqpFpSA/B7vNSkdX8gedpiePEY8nsFiSQw8VSD63nuyXlS4ltVpUNE1H0zSjV7qa2p4e7mhRVRJacllVIR6LJ3dMNTdXFHA6bUyorWbdhu0kEvHk8dDRtESqK1ayQi0ej2NRFbREInlMTUNVIRGPo+ta6vgxo3G6oiTDwng8hpZIoBnNzhOmZunJHljPPPkgi4896iPff2mQKsSBb1+bn9fW1lJTU/OpnqOpqYn6+vpP9Vjx2bPZbMydOxe73f6JH5tIJFi9ejV9fX2fw5mJ4Saf7ULs2+ei1Wqlurqa1tbW5B97xReqoKCAvLw8mpqahvtUxOdImp+LT8QY/qWYq6Yy9zUtkQxEFNAUiMViptn2tEyYpYFu2qbroCgqcdO6ht0tmZn5NB1N0VLHSTYw37x1d2p2PpVYLAYoxDTNmIXP5w8CySbmMS2GopDcnp6ZL6HQ1e0lMwMfgEI0kRyimAyIkiGXpiXMVwF0iKbXmZqiJ0x9m9BTIZauoQOJuDZ4pj5dJxiIsvrDLakG6BrmIXaaaeY9dJ1YInsWPi2R2Z5IDJydLznzHqaG6oOqpWQYnxBiCDt27GDnzp2f6rHyt6z9SywW49133/3Uj5f3UwghvjgOh4O8vDxisRh+v3+ffgarqorT6aSvr4+cnBz6+vpwOp3YbDbi8fhH/nHBZrORk5OL19uLqqp4PB50Xcfv92f+GG+1kpObi8/r/axephCfijrcJyD2J4ObZyfva4PWZ/UxGvA4XdcpyM8lx+1KbdOA5ExxhYX5OBw2dDQcDhsjKooBnYqyIsg6ZjrgSQcvyVuLqpCf76a4MA+bVR0U+CSHIqZnwNNNQU46rBnqK3neI6tKUqMXTcdIvbaRVaWpbbppyGNqu24+18x1KC/zMKqmHIuanN0wL9dNaXEB6BrVVWVYrRbTNUte4xHlJSRHZZh6Rpmvs/m8B+4zVJ8pZDifEGKwgT9f9vVL7H8+7Xsp76cQQnyxFh15JF+7+GIuufRSpk2bhqewkFGjRmG1WqmoqKCmpgabzUZpaSlFRUVAcqjbRV/9Gh6Ph+tvuIHS0lLOO/98Fi1axJFHHkVZWTmqqlJVVWXcr6ysory8nPLyck448QQASktL+e73vs/pS5Zw7rnnkZubS1l5OWVlZUydMoWSkhJUVaWouJi8vDzGjBnzqapxhfi0pGJKGJIZSXpWPlM4Zeo1haIzacJYJk0cT2dXDwX5+XT3+igtLiISTfaJ6u31U1TkQbVYqamuoLmlk1g8QXGRh54eP4FQP1MmjWV3Yxuegnxa27upqixlwrgaPAX5rN+0gwm1I2lq7aS0uBBVVYnG4pQWe2ht72HWIeNYs74eTdPp7PYxoryYosJ8enoD5OW6aOvwsm1HM4ceMo7KiiISCY2Wtm7iCY3yUg/dvQE6u3yoqkJLew9zZ45H18FTkENhQQ4Oh42OLi+1oyqo39XC7sZ2Dp83me5xVWytb2TsqAq8/iAffLiNirJCZk6rZfeeNgLBPgo9ueTluujr60fXdUqKC9C0BFMnj6GtvYspE8ewp6kNm9WC025l3NgaNm2pZ8euRiyqwuyZk3nh5Q4qyoo5dNZUtm7fSXlZMbFoFLfLiT/gZ0RFGeH+fl58+Q0SWWHVXoJFIYQQQgghxLCy2+10dnZis9ooL6/ghBNPJBKJsHbNWjyFHubOnccrL7/EqaeexksvvUhfXz8+v49EPM6RRx1FNBpl4WGHoSoK/f1hDj30UGbMmMHrr7/G0cccC7rOypUrWbBgASjwxuuvY7Ukf91XVZXmpibu++c/+ebll7PoyCM5/LDDefqZpxk/fgITJ03iw7UfMm3aVPILCkjEE7S0NPPss88O81UTBwupmBImAyulBlTppKqYSkuKaGlpo6Z6BMXFHvr7+gkEQ+Tn5VBUmM/CBTPR0RlRUcLOnY1UV5UxqqaCdRu2kpfnZuK4kTiddiaMG2kcu6zUQ0F+Ljsamph1yHi6e7z4/UEmjqsmL9dJRVkhm7Y24CnIYdeeNtwuB+FwhOOPmk1BnpuSonyKC3MBncqKQkCjqDCHHQ0tdPf6qakqoXpEMZUVxbiddo454hD8gRAuh42SonymTqxBARJagopSDxPGVlLf0MKYUeXoJHtA7drTyuxDxlFRXkSRJw8FnbEjK8jNceIpyOHIww4hN8dJkSePaZPHUFbqQVVg9MgK3C4HtWOqaWvvwuWw43Y5mDhhFA6HjbKyIpwOG7quYbNayM1xMXlSLS6XgzEjqygtKWT6tImUlRXR1NxKMBjEYrHgcNiNqq7syikt+y/ikk0JIYQQQggx7Gw2GzUja2hs3IPH46G9rQ2ny0ltbS3NTU0UF5fQ2trKO++8QzQWJRaNsn79Oo477jieePwJjj3mWDZu2oSmJVi+fDmtbW1U19RgUVWamprwFHqSIVRzM4lU65K0nJwcaseNIycnh2AwyAcfvM+e3btRVIX169Zz7nnnsmHDBoqKiunq7sIrDcnFF0gqpkRGaggYqaF0yUqpVK+p1HoFjWg0SmlpMWvWbiAeT9Da3sWsGdPYvqOB5tYOKsrL6On10d3txR8I4fUFUFSV2jFV7NrdhMPuoLS0iO4eH6G+MJqmsWHjdhRUfIEQXd1eRlZXYLWoLF+5HhSFUF+YXm+Qvv4wI8qL6e7x09Hlw2JR2dnQyuSJo+juDZDjdib7Mekam7bsJhDsw2630dbeTW6Oi46uXkJ9YTwFOfj9QaxWC82tnTQ2d9DT6w
egQYHm1i7GjKxg7fp6tESCus278OTn8MayNdRUl+L1BdESCZpaOohEI+xqaKG7p5fm1i7Gj61mR0MUrzc5dlxVIRTqo9frp7gon2Cwj55eL319/dSOqaappY3cXBddXf00NrdxyLQJbN22g6rKcvY0NlNdWcaOnQ309PTS3d1D3cZYsoosGskMHcQcIpqrpVLvqRBCCCGEEGLYbN2yhTWrV+POycGiqjz91NNUVo5gY10dFtWCoijs3LmT5uYmEokE69etA6Czs5Mnn3iC7du38eSTT1C3YQOlpaXE43GCwSCtrS309/URiURYvXo10WiUaCRCY2Mj8XhycqZAIEBzSzMTJ0zkqSefpL+/n/z8fIKhEGvXrGHr1q14PB42bd5MOBJh8qRJ1G/fPpyXSxxkZFa+g9jAWfkOP/I46uo27XVWvvRsfA6nk4Smk+xFrqT2U1FUS7I5eXpZUU3N0VWjmXn6fnKbktlGeh/TNtL7JOewM7an5rRL/i+5LmsZjGWMpeRCbo6LaCxONBoztmWFN8Y/CT11V8++bwrwdFN1mbHO1DPK3Hsqsy7T8DzTyFwjL8/NvNnTjKGTxj7oWfuj68TiMZa/8x79/X2ppuiJrGOZZ+V77pnHOG7xMR/5/svMPUIc+PZ1Vj4hxJeffLYLIbPyHQhkVr6Dg8zKJz6Z9NCvrP5SWmoomIKCjq7oRCIRI3ACNbl/qlpHQSVTpZOsvErXXIGKQno2PDW1TU016FaNGQDT6zK3CjqKcYueCaJSZwWpoErRQVdMwRSZQEpHR1EUAsHQ0C8f87C31Gx8RjUSAyrK0kFVuuE5psbr5gbppqDKGGaX3aA9fbyAP8jSZSsxN25n4Ex8pvApkYgZjc6NoZdZwy+lx5QQQgghhBBCiP2bBFPCxBRqoGQFVPoQQ/sUI5AyDxlLbcsKoFL76hq6ohpDAgeGUwpq6vCpoIrUeaCklpMBlBFQKamQKhV9GeuN/ZKhzPSptdisVuKJBDarlW07mlAUCIejyT5N6KiKgs8fSoVH6WuBEexkAilSlU8MqpJKV0Nlr08HRJkZ/LJDqexZA+PxTPhkVFoNEU6hJ9C1zAx92X2mMsvIrHxCiAGsVivHH388ubm5BINBVq1aRXd396D9Ro0aRWtrK/F4nFGjRrF79260Af0qxPCbM2cOe/bsoaOjg5kzZ9LZ2UlzczOTJ0+mr6+P3bt3G/u6XC7OOOMMnnjiCaLRaNZxpkyZQnl5OUuXLgWSFdGjRo2iqanJGAoihBBCCPF5kGBKGHRTwJJVNWUES+mgQ0lVA6UfqZCujIJ0gVUqkELDGA2nqCi6lsySSN0fKpyC1OPSIZRqWs48bbJWakDllKIkT9+IpRRcTjsTx9UACp1dvZQUT0FRFOLxBDablfw8N/F4ghdeXUmoLzwgjEqf/McM5TOG7WUakGcNx0tdO31gWGVUWGW2ZR1H11IVZZkQytg3K4waolLKXO0lhBApbreba6+9lscee4yZM2dy9tlnc8UVV1BeXk5paSkNDQ0kEgl++9vf8sc//pEdO3Zw1113ceONN7Ju3ToqKiooLS1l27ZtaJpGRUVF6mdqnFAoRG9vL6NGjaKtrU2GRXwBFixYwMKFC/nTn/7ETTfdxHvvvcdNN93Ej370I/7whz8QCoWorq6mvr6eeDxOIpEAwOPxMHr0aDo6OgCYMGEChx56KF6vlz179gBw5513cvPNN7Ny5UrjcUII8WWj6zr5+fkSwg8Dp9Mpf/QSgARTwiwdZijpqqkURTENaUsNz0tVPqWX9fSQPt1UHQUkh++Zqq6MkIlMUGVUR6VDKtPQPWOfdGWUKYjSk1VdmUopkkMBlfTwvuRyOBxh6dtr0IHaMVXkuJ3omk44HGHLtgYWzJ2KzxcgGo0lq5CMMMfcY8o0rG+oIX1GRdQQ1VNDBlXmkOqjAitz2DUgiDI3OR9QKZU9xO8z/j4RQhzwNE2ju7ubvLw8XC4Xqqpy5plnUlNTQ1VVFb/85S8ZPXo0xxyT7E83atQojj76aACuvvpq9uzZg8Vi4b777uPuu+9m3bp1LF26lBkzZnDbbbfxq1/9iu9+97sSTH0BXn31Vf77v/+bpUuXEo1GmT59OrW1teTl5REIBLjtttuor6+nrKyMX/ziF5x99tksW7aM3/zmN9TX1zN69Gja29tZsWIFRx11FPn5+ZSWlvJ///d/jB49mqOPPpq1a9fS19c33C9VCCE+c/F4nN7eXnJzc7HZbMN9OgcdXdfxer3DfRpiPyDBlDDo6fDIPBOfku4dpWSWMVXoDKio0o1QS03VMKWrrMjqJ6UMCqoyVVHJqic1dVaZ4XxGYKWkQykFRVdSp5savqfoyW2pZ1QUnfqdTahqct26Dduw2WwoQCQaQ9M03li2GotFJR6PmSrBMkP5jKF9RthD9jUw1qeqnWBQAJUdVGWG7hlBljnEMpYHV1FlB1GZiirdXCll7JMO2SSZEkJks1gsFBcXE4/HKSkpoaysDI/HQygUYsyYMYTDYbZs2cJf/vIXuru72bZtG3/961+59NJLcTgchEIh5s2bR0FBAX19fVx//fXous5ZZ53FKaecwtatW+U/NL8gO3fuJBaL8Y1vfIMnnniCuXPncvnll/Pee+9xyCGHUFxczIYNGxg5ciRlZWU4HA6qqqpQVZVf/OIXnHzyyRx77LEALF26lF//+tc88MADtLa2snXrVv76179KKCWE+FLz+/34/f7hPg0hDmoSTIkMI9jI7jFlhEumcXSZye4UdKNBenqXTH8oJRU+pY+RCadSw+0UUiFUev+BQ/fS1VOqKV5JhVC6kmx0rmdm40vmU6bm5zqMKC/EU5CX/VrNYwIBTdfxev3098dNQRTGTuYgbsghfebhfAOqpfYWSmVVTGFujJ6pmMquotIyQZQxtC97KGGmWsoUiH3S7wMhxJdeLBZjz5492Gw2TjjhBKZOncq0adO47bbbOOqoo4BkVdW8efN4/fXXsdvtzJkzh507dzJr1izWr1/P9u3b8fv9tLS0EAwG0TSN5cuX88Mf/pBvfvObw/wKDx6xWIw333yTa6+9lt/+9rd4vV7uvPNOvvKVr+B0OgkGg9TV1dHS0kJbWxsAzc3NRKNRfvnLXzJy5EhjfSQSQdM0EokEuq5jsViYM2cOr776KrFY7KNOQwghhBDiU5NgShiMhtlKKohKjrdLDsXLaoRuqqZCQ9GzZ+LLhFHm4XmZwCq7cirZc0rR08PxMsP6srMjLVMhlY7FzJVSWT2mMt2odEVhy/ZM49fMA8k6euYimOMvczj1CXpMmcOqrCF+A0KpVO8o3RRcpa9jugIrHWoNGsI3xNA+430xHStrSKYQQgDhcJjHHnuMKVOmEA6H+clPfsLGjRspKytjypQp3HPPPfT09HDXXXcxd+5cotEof/zjHxk7diwPPfQQFouFSZMm8eabb9Le3s6jjz6Kruu43W5aW1upr69n06ZNw/0yDyr/+te/6O/vp6WlhUAgwH/913+xbds2EokEf/7zn5k1axarVq0iEAhw33334
fP5eOyxx3C73djtdrZs2cLGjRtpb28nGo1y33334ff7ufPOOxk3bhyqqn78SQghhBBCfEqKrutD/HYuDgZ+v5+CggJ8Ph9Op5MFC49gQ91GFEU1vlAsKKqatc68jHm9omSvU9VkQJR1PMW0X/I2s49irCM1ZA9FSW3P3Br3jfWkqqSSgZOiKKZXqWTdJO9mFrLqifSBd8xhFKbgJ/WoQVVS6W2ZSqZMr6chQikjrMq+nz1bX2q7pmHuS5U1c5+moeuJrPXpmfzQNV544VmOP+64j3z/8/PzP9X3kBBi/5H+Nz1campq+MY3vsFTTz3Fhg0bhu08xMezWCycdtppTJ8+nebmZh599FGCweBwn5b4DMlnuxDD/7kohEjal88kqZgSGaahZ+kvxTRMD8i0nxpqJj5SM/Ep6eAFU4PzpMyQPFPllKKS6UtFqpIo3VfKPIQv+UyKMTQwXSmFcT+9R/rZkudjPue9DW5Lh1CZV2Nck/SyEUhpqcWBPaaGGLpnqpbKBFV7C6UyvaGyQ6pMFZU5kDL2Y4iKKlPFlYzlE0J8ERobG/mf//mf4T4NsQ8SiQRPP/00Tz/99HCfihBCCCGEBFMiIzN8jORMfKZhaumhe4ox3Cw9TC8dYCWH9aULjBTUzH6Kqdm5PnQ4pShqMnAxNTrPDAdMz9CXHsqXWmc0OjcG7pFpHpUeggiZZugfl9BkQhxzGJW5Nul9hhrKxxBhVOqamIf1mYKroftODZyRLzugygwrHCKIMj2vsU1SKSGEEEIIIYQQ+zEJpkRGOszImolPI11yZK6mUkxhVWYmO9VYn2lwrmbdR0kGMUOGU0bjc4xt5tn4MH0lG50nG7RntoMxnC9dSZWulDKP7tvr6zf+X2oxEzolL8/ACimyrsvAoXxDN0I3Nz0fou/UwOqqvVRRGSFiVhBlnqFPM85VwikhxEAOh4NIJPKZHEtRFHJyclAUhWg0+pkdV+wbRVFwuVxZM+dZLBby8vIIhULEYjFcLhdWa/I/+WKxGOFwGLfbjcViIRAIZB2rpKSEzs7OL/x1CCGEEOLgJcGUMJh7KGVVRenpYMm8L0ZxUqY6Kh2CpMOmdIiVHp6Xkhq6Zw6nyGp8nuwvpevpSisV09Ol1qX6TemZtZk4SjFVT5EK2rJ7Sw392tMvTM+sNS8PDKTS12rIoXwDQyrNWD/U8D3zDH2Dh/kNDqmGmskvfS7miirM110IIQCn08l3v/tdfv/73xOPx4316f585taTqqqiadpelwGKi4u56aab2LNnDw6Hg5UrV/LCCy8Yx/ykxxv4GLF3iqJw7rnncvTRR3PNNdcQDodxOBxceeWV2Gw2rFYrf/7znznmmGMYP348paWlbNu2jffff5/TTjuNeDzOmjVrjPfL7XZzxRVXcNNNN33s+ySEEEII8VmRYEoAyf+4rRwxgg0bNhjhykfNxqeYK4eMACrVeylrJj5I945Kh10KoCuKERbppt5S6eWkdGCVXm+umiL1nOn1OuZ+U5hv9VSopZi3YRzDXFCkm/7/kMP4zGHVEEP6ssKqgZVN7C2U0lLBXbqTlGZ6rtS2vYZUg4f2mSu7cvNy8Xg8+/ptIIQ4CCiKQmlpada6iRMnctpppwHwxBNPkEgkOPPMM8nJyaGhoYFHHnmEk046iUMOOYT29nY6Ojr417/+BYDVaqW5uZmbb74Zj8fDjTfeyLJlyzjvvPMoLy/n7bffZtu2bVx44YU4nU66u7u59957mTdvHkcddZQxE9wjjzzCGWecwahRo1i/fj2vvvqqBFQfQ9d1nn/+eWbPnm0Ei6NHj8Zms3Hrrbdy3nnncdRRR/H444+jqipXX3017777Lqeeeir/+Mc/6Ozs5KabbuLNN980Kq4cDge5ubksWbKEF154gZNOOonq6mq6u7upq6tj5cqVw/mShRBCCPElJMGUAJK/WPz+97/jrbeWEU8kjAFx6Vn0jIokxbTeGCJn3ofMfWXgLHlK1n1jRj3TcVJ7ZS1n9stel71m8Ox7+zZ+byiDkipMNVWZDXr2TplfoNLLpgMY4Z6xgcwwwPTx0s+hp/5nGoqnZx6bqWwzH8N0LFNANmrUKGbMmPEpr4MQ4mBgs9m4+OKL+dOf/oTb7eaSSy7hmWeeobi4mN/85jf89Kc/Zc2aNRx22GHcfPPNLFy4kFmzZhnBFMD48eO57LLLqKqqYs2aNUSjUbZs2UJHRwfnnHMOd911FyNHjuR//ud/uO666xgzZgxnnHEGt9xyC5WVlVx66aVs3LiRqVOn8tRTT3HJJZewZs0aurq6hvHKHBjC4XBWgNfe3k5RUREXXXQRc+bMoa6uDoCxY8fidrupq6tj7NixnHfeeXR2dlJZWYndbjeCKZfLxVVXXcXSpUupqqqioqKCO+64gwsvvJBRo0ZJMCWE+FJxOBxMnTp1wKzeHy0UCrFly5bP8awOPrm5uUycOPETPaa3t5edO3d+TmckvmgSTAkgGR6NHz+e8ePHD/epCCGE+JwpioKqqqiqitVqxWq14vV66evrw2q1oigKjY2N+P1+fD4fqqpis9nIz88nNzd30H/At7W18e6773L55ZfzzjvvMGvWLObMmcOKFSuwWCyoqkpzczM+n4/u7m5sNhsABQUF5OXloaoqLpeLnJwcysvLWbZsGeFweDguzQFLURScTic+n4/f//73FBQU4PF42L59O6qqcvbZZ/PMM8+QSCR4+eWX2bBhA3l5eYwbN45QKGQcZ8yYMcRiMQoKCujp6cHlcpGbm4vb7SYYDA7jKxRCiM9e+g8pn4QMbf7s9fX1sXnz5k/0mEQi8TmdjRgOEkwJIYQQBxFd18nJyeH6669H0zRefvll3njjDa655hpjaFgoFMLn8wHQ09NDe3s7//rXvzj33HOJxWL4/X7jeIlEgoaGBjZu3Mhf//pXTj/9dF577TUKCwuZOXMmwWCQaDSK1+sFwOv14vf7efDBBznttNMIBAJ4vV7WrVvHvHnzmDRpEl6vl/7+/uG4PAekdNj37W9/m7/85S9ceOGF5OTk4Pf7WbVqFSNGjCAajbJp0yYApk+fzkknnYTdbueJJ54gFosBye+Nd955h7/85S985zvfYe3ataxbt47zzz+fnJwcCaaEEF86uq5nTR4hhoemafI+HOQUXRo4HLT8fj8FBQX4fD7y8/OH+3TEF0zefyG+XNL/pveFy+VCVZP9/CKRCPF4nJycHDRNo7+/36imisfjRnXT1772NVwuF6WlpTz88MNs3boVSFbqWK1WI9xwOBxEo1Hcbje6rhOPx4nFYsY+NpvN6GE1YsQIPB4Pr7/+Ou+++y4Wi4WcnBxCoZD8JfQTsNvtWK1WFi9ezPPPP4/FYjEqnBKJBKqqYrFYjPcIICcnZ8hfyOx2O9FoFJvNhsvl4utf/zqJRILS0lL++Mc/yox9Bwj5bBfik30uCiE+P/vymSTB1EFMgomDm7z/Qny5fN7/AZ4eytff3/+Z/FXTYrFQUFBANBqVSpzPyOcxo6HT6SQ3N5dAIEAkEvlMjy0+P/LZLoQEU0LsL/bl
M0mG8gkhhBDiY8ViMbq7uz+z4yUSCXp6ej6z4wk+l1kMw+Gw9PsSQgghxOdKHe4TEEIIIYQQQgghhoPD4SA3NxeHwzHcp3JQS19/VVWx2+1Acnh5uvVAeltxcfGgx+bk5JCXl/fFnKj4XEgwJYQQQgghhBDioHTllVdyyy238Lvf/Y4TTjjBmOXUbrfjdru56qqryM/PZ/HixRx99NFGcOJ0OoHkUHe73Y7NZsNisaAoitGfUewbVVW5/vrrqays5Oijj+aGG27AarXyox/9iPLycpxOJzabDafTyUknnYTFYsHhcOBwOLDZbBx++OGceuqpOJ1OFEXhggsuYPr06YNmERb7LxnKJ4QQQgghhBDioFRYWMjKlSspKSlh8uTJ1NXV8Z3vfIeioiIeffRRLrroIoLBIFOnTqW0tBSv18v555+Pqqo88cQTLF68mIqKClasWIGqqmzdupUZM2Zwzz33DPdLO2BomkZjYyOHH34406dPZ/r06UyZMoWSkhKmT5/O8ccfT39/P3fffTdjx45l8uTJfPvb36a9vZ2cnBxWrVrFOeecwzHHHMM//vEPTj31VA477DB+9rOfGbMMi/2bVEwJIYQQQgghhDgoqarKyJEjcTgcFBUVUVFRQSQSoaCggOLiYtauXcsTTzzBmjVreO6556isrGTcuHGEw2EWLFhAdXU1zzzzDK+//jrz58/n1FNPZd26dcP9sg44y5Yt47jjjiMnJ4fXX3+d//iP/+DDDz/k1FNPJRqNUlFRwbhx46ioqGDRokUsW7aMxx57jNLSUhRF4a233uKFF15g6tSpbNy4kYcfflhCqQOIBFNCCCGEEEIIIQ5Kuq7jdDqxWq3k5OQwd+5c4vE4nZ2daJqGxWJh0aJFBAIB5s+fj8/nw+fz0d7ezrJly/D7/ezZs4fu7m4aGhqYOHEidXV1w/2yDji7d+/G7XZTV1fHa6+9xqGHHsrbb7/NunXr0DSNNWvWsHPnTnRdZ/ny5Rx11FF885vfRFVVdF2nt7cXv9+Poih4vV6OPfZYcnJyhvtliX2k6J/HFC7igJCeQlWmFD44yfsvxJeLTIsthEiTz3Yh9v1zsaqqivLycuLxOA0NDSQSCSZNmkQgEKCtrY3CwkJsNhstLS2MHz+e7du3U1ZWRmFhIVu2bKGwsJDu7m4KCwu5+uqree+993jqqae+gFf45VNVVUUgECAUCjF69GgaGhqwWCxMnjyZvr4+GhoaqKioAODrX/86FRUVfPDBBzzzzDOoqkosFsPhcBCNRhk7diwbN24kFosN86sS+/KZJMHUQUyCiYObvP9CfLlIMCWESJPPdiG++M9Fh8PBiBEjaG5uljDkc2a1WqmoqEBRFNra2uR67+f25TNJmp8LIYQQQgghhBD/hkgkQkNDw3CfxkEhHo/T1NQ03KchPkPSY0oIIYQQQgghxEHJZrOhKMq/dQy3201eXh52u/0zOquDj9VqxeFwZC3n5+ejqipWq5W8vDzjGquqSkFBARaLxdjf4XCQm5s7HKcuPgNSMSWEEEIIIYQQ4qC0ZMkSli9fTltbGwAWiwWHw0EkEkHTNOx2OxaLhf7+fgCcTieJRML4AvjhD3+Iz+cjNzeXV155hQ8//NDodaRpGjabDavVSjgcNpqtJxIJNE0jHo/jcrlIJBJEo9Fhuw7DyWazcfHFFxMOh3nwwQdxOBx84xvfIJFIEAqFWL16NUceeSSjRo1ixYoVlJaWYrfb0XWd++67j0gkQm1tLTU1Nbz22msAKIqCzWY7qK/rgUSCKSGEEOIgUl1dTVlZ2ad6bEdHh5TO70esVitTpkzBav3k/zmnaRpbtmwhHA5/DmcmhBAHjoKCAmw2GwB2u51zzz0Xt9tNKBRizZo1nHPOOSQSCd5++20SiQSzZs1C0zRaWlp49tlngeTP1Mcff5wxY8Ywbdo0vF4vixcvJhwOs3TpUi699FL6+vqor6+nvb2duXPnEovFaG9vp6GhgXnz5gHw8MMP4/f7h+1aDJdYLMbbb7/NwoULASguLsbpdHLfffdxzTXX8NJLL3HvvfdyxRVXUF9fz/Tp0/nLX/7ClVdeyYgRI2hoaDCqqpYsWcLWrVs5/PDD6e/vZ9SoUdx111309vYO86sUH0WCKSGEEOIg0t3dTTAY/FSPjUQin/HZiH9HIpFg9+7dn3oIivwFWQghspWVleF2u7n33nv51re+xYgRI9i5cycrV67kuOOOw+Fw8MgjjzBy5EgmTZpkPK64uJjzzz+fUaNG8X//93+MGzeOaDTKzJkzqauro6uri0ceeYRLLrmE0aNH8+STT1JWVsa0adM4/vjj8fl8FBYWMnLkSOrq6obxCgwfcwPz9vZ2du7cyVe+8hVcLheapjF9+nQ6OjrYsWMHq1evZsmSJXg8HjRNA5IVUieccAKbNm1i7dq1KIrCI488wg9/+MNP9Qcc8cWSd0gIIYQ4iPT39xvDEcSBTdd1fD7fcJ+GEEIc0BwOB8cccwwdHR3U19eTl5fH4sWLsVgsBINB+vr66O/vJx6Ps2HDBs477zxcLhetra3GMdra2rjnnns4/fTTGTVqFNOnT2fXrl309/ejqip9fX1Eo1FisRhr167lzDPPxOFw0NnZSV1dHQUFBTQ1NbFr165hvBLDS9d1NE1jwYIF1NfXAxAKhairq6O/v5+jjjqKBx54AF3XsVgs9Pb2snv3btrb243Hr1y5EkVRKCoqQlEULrroIqqrq43wSuy/FF3X9eE+CTE80lOoypTCByd5/4X4cvmip8UWQuy/5LNdiH3/XCwrK6OkpARN02hqasJms1FVVUVjYyPhcBiHw0FfXx95eXlUVFQwb948RowYwQsvvMD69esBKC0tpaurC7vdTm5uLvF4nMrKSoLBIF1dXTidTqMqqqSkhAULFlBRUcHLL79MXV0dY8aMIRqNsmfPHg7WX89tNht5eXnMmDGD1atXk5ubi8fjYefOnUSjUUpKSujo6ACS17usrIzdu3cbVeBOpxObzUY8HicnJ4cjjjiCkpIScnNzueuuu7IqssQXa18+kySYOohJMHFwk/dfiC8XCaaEEGny2S7E5/O5aLfbKSoqIhaL0dPT86lCpPQx4vE43d3dB20QtTc2m+0zCZEKCgrIycmht7dXKsWH2b58JslQPiGEEEIIIYQQ4mNEo1Fj9r7hPMaX2WdV2eTz+WS4+wFEHe4TEEIIIYQQQgghhBAHJwmmhBBCCCGEEEIIIcSwkGBKCCGEOAi5XC5KSko+9eMnTZqEy+X6RI+x2WyoqoqqqowfPx6bzfapn18kuVwuPB6PsezxeHC73Z/oGIqiUFFRwZgxY3A6nZ/ZuVkslqzvMZfLNWS/l5EjR/5b34tCCCGEOLBJMCWEEEIchC677DLuvfferBDD4XBgt9uN5fQMN5AML1wuFxaLBYvFwvXXX09VVZWxn9WabFtptVqxWq3Y7XZUVcXlcqGqKoqi8IMf/IB58+Zht9s58cQTycnJGfQ8qqpis9lwu91YLJYv5FocyE466SQee+wxioqK8Hg8PPT
QQ5x22mlA9vtptVqxWCwoipL1HlssFr7zne9w11138ctf/pJbb73VeD9dLheKogDJZr3pYNFqtRr7pN8jm802KKh0u93cfvvtVFZWAvCtb32Lk08+Oet7CeDCCy9k0aJFxvEVRTHup4+bvu9wOIzjD/x+FUIIIcSBSZqfCyGEEAcZj8fDEUccQUtLC0cccQSvvPIKRxxxBJdddhm6rvOb3/yG0aNHc9555xGJRLj55ps56aSTmDlzJj09Pdxyyy1GyHHKKaewZMkSYrEY//u//8sJJ5zA9OnT2bVrF319fSxcuJCenh7uuOMOzj77bA4//HB+8pOfMGrUKCwWC2eeeSann346mqZxxx134PF4uPTSS7HZbGzZsoVf/epXJBKJ4b5k+y2n00lZWRlXXnkl8XiciooKnE4nkyZN4uqrryYnJ4fbb78dp9PJ7Nmz2bZtG5WVlfz9739H13UmTZrECSecwGWXXUYgEKCsrAxFUfjpT3/KmDFjWL16NQ888AA333wz8XicP//5z1x++eUoioLD4eDxxx+noaGBa665BofDwWuvvcYDDzyArusEAgHq6uo46aSTePzxxznyyCP50Y9+xNVXX80hhxxCIBDgpptuMqrorrzySt5++226u7u59NJLefbZZ7nmmmvQdR2fz4emaVRUVHDNNdcwZswYLr74YnRd57bbbmPbtm3D/VYIIYQQ4lOSiimxX9F1nVgsRiKRGDR1qq7rxONx4vH4vzWtaigUYuOmzfj9gY/dV9M0du7aRcPuPUOeTywWIx4f+lyT2/69cxVCiM/D8ccfT1dXF6+//jpf/epXcbvdXHnllfz2t7/lxhtvpLu7m8svv5yf//zn/PznP8dut3POOefwyiuvUFtby7x584Dk0Kwrr7ySd955h0gkwllnnUVlZSV2u5377ruPnTt38v777zNr1iyKi4t57733+MMf/sCuXbsYO3YspaWlXHzxxfzkJz/hnnvu4fvf/z5FRUX4fD5+/OMfc9hhhxlVVWLvHnvsMaZMmcKMGTN47LHHAAgGg6xevZr29nbOOeccVq1axaRJk/je977Hc889Z3w2VVVVsWfPHnp6eojFYjQ3N6NpGhs3bmT16tWcccYZFBcXM2fOHO6//37Gjx9PMBjkuuuuIxgMUlxczJVXXsnjjz/Oj3/8Y8466ywqKiqMc3v88cc58cQTOeaYY9i+fTvFxcVMmzaNH/zgB2zatImLLrrI2Le6upq8vDycTidjxowhNzcXgBtvvJGZM2fyhz/8gaamJubMmcP3v/991qxZQ2trKxdffPEXeLWFEEII8VmTYErsVxoadvONb36b/73lN0SjUWO9ruu0tLZy5Xev5saf/hfhcPhTP0dd3SbOPvdC3lv1/sfuG4vFuf6Gn/Hf/30T8Xg8a1swGOTGn/4X/3ndDVlTkeq6zsqVq7jkG9/iX8+/+KnPUwghPg8Oh4NzzjkHh8PB4YcfzujRo5k6dSq6rmO327FYLMawPJvNhtVqNSpaFEXh+eefZ/v27VnHtFgsrF69mmXLlqHrOkuXLsVut3PVVVfx4Ycf4vV6sdlsxONxY4hfmqqqxrr0z9n29nZ6enoIh8MynG8fhEIhfvazn/Gzn/2MYDAIwPe+9z0SiQRbtmwxhkparVY0TcsK+7Zs2cL48eOZMWMG5eXlnHjiiSxcuJCTTz6ZDz74gHg8jtVqZffu3axZs4aWlhZqa2s5/PDDGTduHJD83HM4HFgsFnRdR9M04/g7duzA6/Xyox/9iMcffxxN04zhnub3HJJTqFdXVzNy5Ejje7Czs5Pu7m66u7vp6uqiu7sbm82GrutYrVa2bNnCiy/KZ60QQghxIJOhfGK/EgyFWP7Ouyx/510OOWQqp37lFBRFIR6Pc8cdf+S5519g6pTJg4Z1mKuS0v0w9rY9oSUIhfr28Rg6/Xv5xSg3N5e5c+bw4xt+ytixo/nulVdgUVV6env55a9uIRKJsGDBvCHPRwghhktNTQ3r16/nf//3f0kkEpxwwglMnTqV22+/nSuvvBJd1/n1r3/NH/7wB2688UbC4TA333wz9957L8cffzwdHR0888wzbNu2jZ6eHm677TbOPfdcwuEwy5cvp7Gxkfb2dvx+P5s3b+bUU0+lq6uLQCDA22+/zUUXXUR9fT1btmyho6ODu+++m//6r+QfHH7/+99TXFxMbm4umqaxefPmQX8UENk6OztJJBLs2LEDgJaWFkKhED09PZx88sl0d3eza9cuDj/8cN566y0aGho45ZRT+MMf/oCu6+zZs4dbb72V6667DpvNxvLly3nuueeIRCIsXryYHTt20NfXx8aNG0kkEqxevZonn3ySKVOm4PP56O7u5s477+Saa67hrLPO4sEHH6Sjo8M4v0QiwQMPPMD555/P+vXr0TSNVatW8Zvf/IaOjg5uvfVWTj75ZLq6unjmmWe47rrraGhoYNu2bfh8Pnbt2pX1vZD+/vrNb37DN77xDeLxOH/84x+H6/ILIYQQ4jOg6DLO6KDl9/spKCjA5/ORn58/3KcDwIa6jZy25BwARo6s4eEH/sGIERW8+tobfPf7PyQajTB+3DiefvIR4xeXDXUbeeGFl+jp7WXK5EmcdtpXKC4qQlEUdF2ntbWNZ597noaG3UyfPpWysjIuu/wK/vrnuzjpxOPRdZ3t9Tt46uln6erq5pDpU1ly+mnk5eUSjUY55/yvkpuTwz/vvXvQDFLhcJhr/vN63l7+Dg/88+9MnTqF2357O3+9+x7++Iffc+wxR++3wdT++P4LIT699L/pj6MoCoqiZFW1qKpqVLIoikIsFgMwKlPS4ZDdbicej6NpmvGY9H6appFIJIyfebquo6oqFouFRCKBpmkoioLVaiUWi+3z483nKQYzX6+By+n3K03X9b1e13Sz8UgkAmBULGmalvV+l5SU8OMf/5icnBz8fj+/+tWv8Pv9WCwWVFU1vncGnqPFYsk6F/P3kvmc098L6cqr9POmb837ps9RwsvB5LNdiH3/XEz3TNxXuq5L78PPQfpn+r5Kfz6J/d++fCZJxZTYL114wbk899wL3HHXn/jeld/m/269jRmHTEdHx+dNDpvTNI3Hn3iaX/7qfyksLKS4uIgXX3yFp55+jjtvv42ammqampr55reupKmpialTp/DB6jVYbVbjw0TXdd7/YDVX/eBHeDweqqoq+fVvfs+KFSv59a03f+wPSIfDwX9e8wNWr1nLrb/5Hd+87FLu+cd9XHDBuRx15KL9NpQSQhy80uGEWfo/7Ab+gj8wZDAPsTb/x6B5P/OxB/5HY7r/3id5vPhoQ/U4TDO/X2ZDXdeB7/XA74X0Y7q6urjhhhuw2+2Ew2FjfSKR2OsvauZwc6hzM5/zwPNIHz99a95XAikhxL/L6XQyc+bMrCHmHycYDLJ+/frP8awOPnl5eUyfPv0TPaa7u5utW7d+TmckvmgSTIn90uxZMxlRUcGdd/2J7fX1tHd08H+33MSdd/3JCKaam1u45dbfsOiIw/nF//w/8nJzef+DNVxx5fe58w9/5u
9PR0xo8fTzQapaenh4kTJzJ9+nS8Xi8+n4+Ojg6amppobGzk29/+NqFQiD/+8Y/a7EWhUIhNmzZx3nnnUVdXR3t7OzNmzGD8+PF0dXXh9Xrp6OgABnO4eL1eurq6GBgY4J133uGb3/wm0WiUJ598ks7OTtra2nj//feZMGECoVCId999Vyt/Y2PjsOOJRCL8z//8D4qiMHnyZDIyMrSgFcCBAwf46le/yp///Ge6urp46623tO1u3bqV7u5uZs+ejc1mw+fz0draSjgcxmKxsHz5chwOB/X19dr23n//fe666y7GjRuH1+tl//79zJw5U370CCHEF0in03H11VcTDAZpb2/HaDSSnZ2N0Wikt7eX3NxcampquO666zh69CjvvvuufE+fZST5+TnsdJKf19c3EAwGSUtLpbT0IIlJibS3tRMOh8nISKepuRmfz09qagrt7R0kJsTjjHFiMBjJy81BVVW2bt3G1KlTsVotRCIR3nt/E7NmzcBqsbDpw83HWmET6HH3MmZMCYfKynA6HAx4PHg8XuJiXaSkpFDf0EhBQR7l5RWkpCTT4+6lq6sLn99PcnIS+Xl5HC0vx+FwEI1GiYuLpaqqBldMDCUlRWdU8sNQKMSevfuoq61n2vSpDAwMkJycRDAYpLvbTUtLK5MnTcTn97Fz524mT55EYkICtXV1BAIB8vJyqa6uIRpVmTRxAgkJ8afcjyQ/F+LcIUlez03Hz5oXDoe1nkoOh4NAIEA0GtXqgUgkQiAQ0JLEKoqC3W4nGAye1DvoxKSper0eq9WKz+cjEolo29Dr9aiqik6n04YD2mw2VFXF5/MN25fNZiMYDJ72jwmdToeiKMOSxF5zzTVkZGTwm9/8hkAgoG03FAoRDAYxGAxYrVb8fr+WsHZofwaDAYvFog2JHzL0uNfrJRqNamUezaRuF0LqxdFEp9PxzW9+k7KyMux2O0eOHOGuu+5i/fr1pKam0tvby969e1m7di2HDx/mzTffHPXf0+eS06mTJDB1DjudwNRQXqihi1Oj0Uhffz8GvR673U7vsdl37MdaJlXAarGgKAoGg+FY0rYgZrNJmy0gEAho0z8P5bByOh1EoypGo4FQKITRaKSntxerxYJeb0CvH7zgNRgMhMJh9Do9Kioejwe9To/ZbBp8bmjdnl7sdhsq4Pf5cLlcn3q66s+Dqqr0DwzQ3d1NdlYWwWAQk8mMTqcQjUYJhUJ0dXdjt9kIh8Po9HrsNjv9/f04nQ6MRiP9/f3odDrsdvtHTgMqgSkhzh1yAS7OBS6Xa7Dn9XEJYcXJpG4XQurF0ebGG2/kgw8+YObMmZSXl7N69Wp+8YtfkJeXx+LFi2loaCA2NpZt27ZRXV090sUVn4DMyif+aYbj8jINBZNij/uCP/620+k8aX1FUbBYzCfctwzb5tB2T9xPfFzcsMf1ev2w/wCm2NhTLnN8DyLrcfs7UyiKgtPhwHlsyOPxr4lOp8NsNpOWmqotO+T445ILTiGEEOeaU+UcEUIIMfp1d3cTDAZxu934fD7q6+uJRCKkpqZiNBqJRqM0NDSwfPlyHn300bN+sqxzjfSYOoedTo8pMbpJjykhzh3SMixGq6Ee1Z/VckJ6TAkBUi+OdkPf+YqioNfrteHiBoNBuy1Gh9Opk86csU1CCCGEEOITsdvtXHHFFdx6660UFhYCkJmZyV133cUNN9yAw+HQljUYDKxYsYJ77rmHqVOnAoM9jS+88ELuvfdepk+fri130UUXcc899zB58uRh+1uwYAElJSWfWfmdTidXXXXVsMeWL19O7Ak9onU6Hddee+2wHsZCCCHOXkMNEaqqDgtESVDq7CRD+YQQQgghRqkLL7wQo9HIli1buOWWW/j5z3/O1772NZ577jmys7O59dZb+f3vf4+qqixdupTCwkI2btzIrbfeitvtprCwkDFjxrB+/XpuvfVWenp6KCkpITc3l40bN/KVr3wFt9utzWSXlZVFJBIhJiaGxMREPB4PiYmJpKSkUFZWxpgxY2hsbKSysnJYOR0OB7GxsTQ3N1NcXMzRo0fJyspCVVUmTZpEU1MTbrebsrIyWltbCQaDWCwWpk6dSjQapbS0lPHjx1NXV0ckEmHXrl2oqsqECROwWCzs37+fzMxMamtrURSFnJwcGhoamDx5MqFQiNLSUpKTkzGbzRiNRioqKoDBgNeYMWNISUlh7969DAwMMGnSJCwWC/v27SMSiZCfn09SUhJNTU04HA5sNpu2/4kTJ2I0Gtm3bx8A06dPx+fzceDAAS1puxBCCCE+ngSmxDBD3SWFEEIIcearqanh6quvJjk5maamJjIyMujs7GTv3r2UlZXx05/+FIvFgs/nY/bs2Tz88MM0NjayadMmpk2bxrhx4/jrX/9KXV0d7733HjNmzGDs2LH86U9/orm5mc2bNzNlyhQtMAWDCcjvuusuXn31VWbNmsXYsWOpra3ly1/+Ms8++yz33nsv//mf/zksH5TVauXLX/4yTz75JP/zP//D3XffzU033cSTTz5JXl4eMTExXHHFFfzmN79h+fLldHZ2cumll9LR0YFOp6Ojo4OEhAQyMzMpKSnBarViNpvJycmhr6+PsWPHYrFY2LZtGzqdjmnTpmnD2Z1OJ2lpaaSmpjJjxgweffRRrVyLFy9m8uTJVFRUMGnSJDIzM7FarfT19TFv3jyeeeYZvvvd7/LMM8/wox/9iDfeeIOsrCysVisOh4Pk5GT8fj+FhYW0tbUxefJkysrKKC8vx+v1fpFvBSGEGNUKCgrIyMjA6/VSVlY27DtUp9MRGxtLd3c3LpcLr9f7iWflS01NJT8/n9LSUgYGBgCwWCzMmDGDSCRCdXU1bW1t2vIul4v4+Hhqamo+drsGg4GMjAzq6uowGo2kpqbS0NCgPa/X6xk7dixHjhzB6XTidrs/clvJyckUFRXR0NAwrN49F0hgSmhUwOuVJHJnDWWwq6vVah3pkgghhPicTJgwgf3791NbW8uqVaswmUzaJCJ6vR5VVbXhENFoFJPJBIDJZNICNyc+FolEhj02dAEPgz8ObrnlFkpLSzly5AgTJkzgnXfeoaysjLy8PNavX8/EiRNxOBzDAlNdXV2EQiFWrFjBu+++y6pVq+jr66Ovr4+Kigo2bNhAamoqiYmJ6HQ6LBYLGRkZPPTQQ4RCIfR6PV1dXaxfv576+noKCgooLCykv78fVVWx2+289tprrFy5EkVReOWVV7j33ns5fPgwiqKQkpKCoij8/e9/Z8eOHVq5ZsyYwXPPPUdtbS16vZ4f/vCH/L//9//weDz8+Mc/1qYt37BhA9OmTeONN94gOzubMWPGUFxcTEdHB3q9HrPZzKZNm5g9ezZjxoxh8+bNn++JF0KIs8zy5cupqakhIyODsWPH8ve//50xY8bQ0tKCoih89atf5c9//jOrVq2iurqad955h/z8fHQ6HYcPHyYzMxObzYbf76exsRGbzYZer6e7uxuAK6+8koGBAex2O2+++SYwGKxauHAhmzZt4oYbbmD9+vX09/djsVgIBAJYrVa
ysrJoamoiPj6ecDhMWloaBoOBsrIyIpEITqeTtWvXct999xEbG8vKlSt56aWXSE5ORlEUjhw5gs1mIzc3ly996Us89NBDNDU1nXT8Op2O6667jt27d3Pddddx33334fF4SE5Oxuv1EolEcLlcqKpKTk4OR44cAaCkpISmpiY6OzvJzMwEoL+/n4KCAo4ePaoFwpxOJ+PGjaO5uZm2tjYyMjLQ6XRUV1efEfkbJTAlNE6Hg2g0eka8McVnw2I2o9NJKjkhhDhb6fV69Ho9RqMRk8lEfX09ZrOZSy+9lOTkZPbv309KSgqJiYm8/fbbfOlLX+LDDz9k9uzZ/PKXv6S9vZ0bb7yR999/n3nz5vGb3/yG9vZ2brrpJjZv3syMGTP4xS9+oe1PVVWefPJJMjMzWbBgAYB27RAOh7VAmNlsZvny5bz99tuEw2Gi0Sh79+7lxhtv5Nvf/ja/+93v+PWvf42qqtqQt2g0qu0jEAjQ2dnJypUriUajlJWVafsZWm737t2kpKRovZOqqqqIi4sjEolQU1PDvn376Ovro6enh/r6eubOnav9QBly4MABVq9ezYEDBwiHw1RXV7N27Vo6Ozvp6+vTfgwcf3xD5dizZw9Op5Pa2lotsevmzZtZsmQJmZmZHD58+It4CwghxFnBYDCQl5dHYmIiBw4cwOFwkJ6ezoUXXshLL72EwWAgGo1qjS4zZ86kuLgYGBwufvnll2tDspOSksjJyWH//v10d3djNBqx2+3k5eWxZ88eHA6H1ujS1NTEpk2baG9vZ968eSQnJ9PR0cHevXvJz89HURQOHDjAvHnzaGtrIy0tTRvSPtQIMTTiSFEUFEVh5cqVBAIBsrOzAZg7dy7r169Hr9drdZjNZmP8+PG0tbVRX1+Pqqr4fD6mT5+OTqfTeoSlp6eTlZVFf38/ZrOZCRMmcPjwYW688UZeeukl0tLSWLZsGc8++yz/9m//xhNPPMGyZcvo6OigpaUFt9uN0Wjkxhtv5MCBA1x22WV8+OGHfOUrX+HJJ5+kpqbmjPj9L4EpAfzfh0iCGEIIIcTo8cwzzzBjxgxiYmL4zW9+g9vt5le/+hXz58/n0KFDbN++nYkTJ5KVlcVLL72Ex+MhLy+P3/3ud3R0dNDZ2YnX66WwsJD77ruP1tZW2traGBgYIDc3l9/+9rd0dnZq+3v//ffxer0EAgFyc3OprKzE5/PR39/Pc889B8Df//53ent7GT9+PO+884627pYtW2htbaWlpYWf/exnHD58mGg0ynPPPYeqqrzzzjv09fXR399PZ2cnf/zjH5kzZw6RSITm5maefvppfD4f5eXlNDc309LSwvTp04mJiaG0tJRIJMIjjzxCNBolEonw5JNPMmvWLHQ6HQ0NDfh8Pvr6+oa9fm+88QYdHR2kpKSwfft2du7cycyZM7FYLDzwwAOEQiHWrVuHqqqsW7cOt9tNIBCgra2NlpYWZsyYgd1up7q6GkVRSE1NZePGjZSXl38xbwAhhDhLBAIBNm/ejKIorFmzBoPBoDV0mEwmmpubqa+vp729ncrKSiZPnozNZqOtrY1gMEhXVxevvvoqJpOJG2+8kVAoRG1tLTDYq6i1tZXm5mZuvvlmfvzjH2uBKUVRMJlM5OTk0NXVhdPp5OWXXyYuLg6dTsf27du5/PLL6enpwWAw4HA4aGpqIhAIAIOzoMfGxhITE0NOTg4ej4dIJMKOHTvo7OzE5XKh1+vp7e2lqamJ1tZWYLCHlN1u13oom81mXC4XTz31FJdccgkJCQm0tLRw9OhRFi5ciF6v55VXXuHCCy+koqKCjo4OFi9ejNfrxWazkZCQwJEjR9iyZQvd3d0sXLiQsWPHUldXh8ViwWq1sm3bNpKTk0lKSuLIkSN8+OGHX/yJ/ggSmBJCCCGEGKWGLuSP19fXx/r167X7VVVVlJWVoaoqpaWllJaWas+pqsrBgwc5ePDgsMf279/P/v37T9pfY2OjdvvQoUPDnhsaVlBeXo7RaOTRRx8dlgNkYGBA2/fevXtPWm8on0ZPT4/23Ntvv63dLisrA8DtdmtDE7Zv3z6sDEM/QgD8fj8ffPCBdv9U+TqGEqkf78QL9aEg09GjR4HB13xo/1u3bh227PHlFUIIcfoGBgZYuXIl4XCYPXv2EAqFyM3Npbe3l56eHhRFYc6cOdTX13PBBRfw+uuvs2zZMux2O42NjdTV1REMBunv7wegoqKCYDAIQFtbG7Gxsfj9fsrLy3E6nXR1dREIBCgqKuLOO++ks7OTl19+mRUrVuDxeLBYLHR1ddHY2EggEGDbtm10dnaSkpKCw+HQ8kh5PB4+/PBDvv71rxMKhXjqqacYN24cPp8Pt9tNKBSitbWV/v5+bDYbU6dOZc+ePQwMDPDee+9pxx8MBqmvr2ft2rX4/X6tLvT5fDQ0NJCUlERNTQ1btmwhNTWVQ4cO4XQ6ycjIwO124/F4qKurAyAtLQ2TyaQFzwYGBjh48CB33nkn/f39bN++XRv2f6ZQ1DOh35YYEX19fbhcLq37+YmOf2sM3R7qWXXickNJ0//ZxOnRaPRTb+dU5T1VD7DjlzvVfj7u+RM/LoqiDDv+j9vuP9rOF+0fnX8hxOgy9JkWQgip24WQenE0GvpNNPRbyWAwaMOpdTqd9rtLr9cTiUS0341Dw+MApkyZwqxZs3jqqadOypF4fN7Fj9rnR5XrxN/Dx+8T0Ibpfdx2PqoMJ27n+Fldc3Nzueiii1i3bp2WnP34ZY5/jT5uO6dbxs/D6dRJEpg6h/2jwEQ4HGbrth1MmzqFhsZGwuEw+fl5WMxmvF4viqJgNBoJh8N0dXWTnJykJYnz+fzY7TY8Ho+WeM7j8WAwGI59kFWi0Qh6vQGz2UQ4HMbv99Pa2obD6SApMZFgKIQajWIwGtEpCqAQjUYIBoPoDQasFguhUAhF0QEqoVCIrq5u+vv7iYmJIRAMkpmRDopCIBDAdiwJeDQapbqmlqzMDGw2Gz6fj3AkgsNux+PxYDKZaGhsJCkxEYPBiF4/GNwKBAYj7h0dHVitVuLj4wmFQ4RCITweLxnpabS1tRMbF0s4FMbhsDMwMIDFYiEYDGE0Do6LDoVChMJh+vr6MZtMuGJdWMxmIpEI/kAAq8VCIBjEcOy1CYXC+P0+7HY7gUAAk8mETqdjYGBgsHwGPcFAEKvVgtfrxW63o9fr/+nzL4QYXeQCXAgxROp2IaRePJcdH0g6G4z24zmdOkmG8omP1NraRigUpLq6mlA4TCgUJhqJ0tPby759pVgsg8GU2NhYvF4vLS2txMa5cHf34PV5iY+Pp6Ojg1kzZ2K32ygvr6Tb7Uav05GUlEhbWztms5m5c2dTWVVNb28vep2e9o4OmptaBrv/H+tIFAwGMZvNWMxmOru60et1LFwwn/qGRtrb27UZecLhMB0dneTl5Wp5LIxGIwa9Ab1BjysmBndPD52dXSTEx2M0GtmyZRuuWBcmo5FwJEIoFMLn8xONRKlvaMRoHPyY6PV67HY7fX19xDidRKNRDpUdJjU1BbvdTiQSobWtja
Pl5Rj0BlyxLsLhMDFOJ3X19ZhMgwE4g8GA3WYjEAxis9loaW3FZrXi8Xrp6+vDbrczMOChuKiQjIx0SksPoNPrMBgMtDS3sHDhfDq7uuno6KC7u5uiwkIOHzlKeloqXd3dzJ41C5tNZuITQohzgU6nY/LkybjdbqLRKA0NDR958TqUOPbEVl4hhBDibDKagzincrYdz6lIpmtxStGoSnNzC6kpKbh7elBVFaPBgKKAGo0SDoVAHUzSlpebc9yQNlBRMRgMxLpc6PUGenp6CAaD9PT2EgoFsTvsZGRkEIlGccY4URQFV0wMfX39KDqFgoJ8VFSi6uCFc0J8PMFgiI6OThITE8nISMfpcKKqKokJCccSpQ6QkBCPQa/H6XCg1+soLCjAaDQOdldExWg0Ut/QQG9vL7GxLmCwG2UwFAJVReX/hhLGxbqOvQ5RnE7nsVkccrVZj1QVmltasJjN+P1+rVcVqkqM00lmViahUAg1emy7kSgxTidOh5Ps7CzMZjMG/WBCv7S0VGpr60hKTESNqrhiYoiLi6X1WFfNqBoFVUUBcnKysVqtmIxGAv4AgUCQltZWzGYToBCNROnv7zvxdAohhDhLjRkzhmuvvRaz2UxxcbE2S99QD+WhegvgsssuY8yYMSMyfFwIIYT4KIqiUFJSQkFBATabjfnz5zN16lT0ej0pKSksXbqUnJwcbXmLxcLcuXOZMmUKer1ea6Q577zzsB4bJVNQUMCiRYuIj4/X1svKyiIrK+ufKqvFYmHatGna/aSkJDIzM4ctk5GRMay84h+ToXznsI8byqWqg0PjhobqDeVQ0uv1eL0+2js6SE1Jxmg0otfrCYcjwOD0zhaLBb8/gMVixu8PYLNZ0el02vA/g8GI0WjA5/Oh0+kwm82EQiGCwRBm8+AwtWg0qu3XYrFotw0GI6oaJXosUAYcS2qnYDQOjq8Nh8PodLpj422jgErwWO8kj8eL0WjQLsr1ej01NbWkpqVit9nwer2YTCYURSESiRCNqsfKrNfGBAcCAYxGIzA4vG+ozIOvQxgVjg09HExWZzabCYXD6HU6dHo9+mHbMWEw6Adfa5MJ77GhhKFQCIPBiMlkJBQOEzj2Og6+BgZ8vsHEfTa7nexjQTCj0UgwGMJut53W7IoylE+Is4sMWTj3KIrCV7/6VebOnctDDz3EhAkTeOGFF/jqV79KMBjk+eef55prrsHr9fLKK6/wrW99i97eXn7/+99rSVvF2UnqdiGkXhxN9Ho9a9asIT4+Hr/fT0dHB+np6VRUVJCYmEhHRwdLlizhN7/5DV6vlxtvvJHOzk6SkpJoaGg4lnImn/b2dpKTk9m0aROrV69m9+7dzJ49m/vuu49AIMCCBQvQ6XRUVFTg9/tJSkrSZr9LSUmhpqZGy+MEYLPZcLlceDweHA4HHo+HlJQU1qxZw/bt26msrATAarXS0dHBpEmTaGtrIzk5mcLCQqqqqtizZw+KomjP+f1++vv7UVWVmJgYFEUhNzeXsrIybDYbDoeDrq4uurq60Ol0FBcXExcXx759+3A6nRQXF1NdPTjaKC8vD6fTqe1zaIKRSZMm0d7eTm1tLRMmTECn03HgwIGTck59UWQon/jUhqbNBLQgzBC73UaePWdYi6vJpDv2f/g6x69rt9uHbcdmsx23vklbFwa/nI5f9/jnTuzod/yMAjqdbth6Q7mWhtZ3Oh3D1lVVlcLCAu1YHI7/e/7E4x5iMPzfx2Z4uU5ex+l0nnK5E7czdAxD+z9+eZPRiOmE7VosZiZNmnjS+mfa7ApCCCE+P6qqsnv3bvr7+9m5cyfLly/HaDRSXFzMv/7rv2KxWIiJieGDDz6goaGBXbt2sX37dglKCSGEOKNEIhH27t3LwoULj3UKMGA2mykqKuKxxx4jMzOTgYEBwuEwFouF9PR0nnjiCeLj47nuuuuIRqOsW7eO7u5u7rnnHqZMmcLWrVvZvn0748aNIzk5mYaGBhRFoaCggLFjx/Lcc89xyy238MEHH3DDDTfwt7/9jeuuu4777ruPcDgMDP62W7NmDa2trYwfP56ysjI6OjpISUnBZDJx00038e677xIXF8eFF15IWVkZaWlpx0bFpKHX64mLiyMpKYmOjg4WLlzI/v37MRqNg3mHQyGmT5/OwYMHufHGGxkYGCAuLo7HH38cgPHjxzNz5kwqKyvJzMxk1apVbNu2jWuuuYbNmzezYsUKKisrWbVqFQcOHCA1NRWHw0Fvby8LFixg48aNrF69mk2bNp0yGfqZRIbyiU/ss5h976MMzVLwSTryHb/OJ133+GMZ6iX2aToRhsMRbTaEYDD4uY8DHir353kuhBBCnPlUVT0pb1RlZSWtra20trbyl7/8hZkzZ7Jw4UJUVdV6BQshhBBnoo0bN9Lb20s4HKahoYGSkhLWrl3Ls88+SyQS0UbXJCQkkJqaSm9vL/39/aSmpuJ0OolEInR1dZGRkYHVasXhcGiz8+n1embOnIler8fn89HW1sbmzZupqanhgw8+IBqN4nA4tM4G/f39KIpCcXExnZ2djB8/nvr6empra3n//fe1kTQmkwmXy8XOnTvZu3cv0WiU3bt3s2nTJmJjY8nOzsZkMtHY2Mjhw4cpLCxk7NixVFdXk5KSgl6vp6mpCVVVWb9+PZ2dnQCkpaVRWVnJtm3bCAQChEIhdu/erfUGPHLkCO+//z7V1dVs2bKFxMREsrKysFgsNDc343a72blzJzNnziQ2NnakTulpkR5TYkQMBZCGclMdf5Hc1dVNbKwLnU530tScxy+rqiooCu5uN319feTm5lDf0EBraxuFBfm43T1Eo1EyMzMwGo1UVFTiinVht9moq28gOyuLQMBPe0cnhQUFmM0m3v/gQ5acvwgAt7sHRafgdDjo7Ooi1uUiEokQDIVQAJ/PT0JCPCaTidIDB2htbWP2rJm8sO7vfPlLN9DT24vVasVsMuHxerGYLfT29WpJ1+WHgRBCiH9Wb28vra2tqKpKVVUVgUBAG1qQkJDAypUrMZvNVFdX093dzdq1a2lubqapqWmESy6EEEL8H7/fT1tbG2lpacyYMQOPx8O+ffu45557CIfDLF++nAMHDhATE8Nrr73GDTfcQCAQ4G9/+xvRaJRrr70WnU7Hhg0bqKmp4ZprruGOO+5g79699PT0ANDd3c3//u//kpiYSEFBAY2NjYRCIRoaGohGozQ1NTF+/HhCoRA7duwgGo2yZ88eMjMzqampobi4mN7eXi2I1NjYSF9fH6FQCLfbze23305TUxMVFRUEAgF8Ph8tLS0cOnSIOXPm0N7eTmtrK0ePHgWgoaGBzZs3k5GRwf79+xkYGNDKCrBz506uvvpqJk2axKuvvkpzczN33nknLS0tVFRUkJubi9/vp6WlBb/fT3NzMzt37mTu3Ll0d3cTCoVIS0sjEAgcS39z5pIcU+ewkcwx1NfXz/79pYwfP5aKyqrBAI7ZDKqqdTHs7OoiJTmZ3mMf9ri4OHp7e5k2dQqqCgcOHAQFfF4f8QnxjBs7hs1bthIOh5kxfRr7S
w/gdDjIyMigt6+X2to6cnNy6OjopLi4iAMHD2E0GCgoyKeltZUJ48fz6mvrWX3xKrrd3bz99nuoapT5887jwMFDhEIholGV2DgXjY1NpCQnY7fbmXfeHHbt3kNZ2WEURUckEmbypEn09PXS29N7bLxyLbHHZumzWCwsu2DJiAemJMeUEGcXyaUhjm/AGbq8G2rkGapbz/Su/OKzIXW7EFIvjmZDvaJOlJycjKqqdHR0DO+sACfd/7jtfJz09HS8Xu+wANGQ4+vXU9Hr9YMTb51imaF8xR/13EeVU1EUdDqdVnefzjEdv8ypXpcv2unUSTKUT4yIoQ9tTW0dgUCA9vYOurq66Ovvxx8I4PF6KSospKOzi2AwqI3z9Xq9eLxevF4vev3gh9titZCRngZAZkYGnZ1deDxeDHo9ZrOZYHBw9rpYVyxNzc0AeLweDAY9UTWK1+dDURR8vsHtNjU10d3tJhIJk5KSQm1dPXq9Dre7B4NBz7QpU4hxOpk+bQp+vx8Y/MDPmjmDxYsXEB8fj8frIdblwmAwEA6HmTplEtFoFIvFQlJS4si86EIIIc5qQxedx198RqPRYYEoCUoJIYQ4031U4KW9vZ2Ojg6Ak4I8pwr6fNKgFEBzc/Mpg1JD+/g4Q6ldTuWjAlZDz30U9biOG/9o2VMt80lT3YwUGconRoTBYCAnJ1vrsWO1WrFYzMdacgc/tFarBYvFQkNjIx0dnaQkJw+OE7bbMRgMJCcno9frsdvtWK0WFEXBZrNRVFhAQkI8ZrMJo9FIX18/yUmJDAx4yM3NxmKx0traytQpkwkEg7jdbooKCwmHQ0yZPImu7m4y0tOYM3sWKpCUlEhDfSPFxcXYbFbsdhuzZ8/E6XQy+VgC8oKCfExGI1arFaPRSKzLRWVVNZMnTcTlikGn05OVlUlNbR2ZGekj++ILIYQQQgghhBBnCBnKdw4bDUO5VFUlFA6Dqp5WXqYTu3J+1GOnMjAwwN59pajqJ4+sfxJ2u52pUyaj041sh8XRcP6FEKdPhiycu0aim/5HDWf4R8Mc/tHzn2VZPut1RhOp24WQenG0SUlJYebMmVRXV9PS0oLb7T5pmfj4eAYGBs74fEliuNOpk6THlDijKYqC6disCKe7/Ok8dip2u5358+ae9r6EEEKIkTZp0iQuvfRSfD4fHR0dPP/889rsQ0NSUlJISEigrKyMwsJCAoEADQ0Nn2p/JpOJ66+/nvj4eJ588klaW1u155YuXcp5552Hx+Ph0KFDvPPOO4RCIe354uJiEhMT2bJly2ntKz8/n/T0dD788EMMBgOXXHIJL7300rBtAlx88cV8+OGH5ObmUl1dTW9v78dud/r06SxevBifz8ezzz5LV1fXJ3gFhBBCfNZ0Oh3XXHMNXq+XhoYGDAYDY8aMwWAw0N3dTVFREUePHuX666+nsrKSDRs2SHDqLCM5poQ4RlGUL/RPCCGE+GdlZGRw+PBh/vSnP2GxWJg1axaxsbFccsklXHzxxTgcDpYsWcKdd97J5MmTufzyy7n55pvJzs4mOTmZyy+/nHnz5qHX65k6dSqzZs1iypQpTJ8+HRicqrqgoEDbX2pqKqmpqdTV1TF16tRhZSksLOS9997j6aefJjc3lyuvvJKYmBhmzZrFkiVLCAaDdHd3M23aNOx2O0ajkVmzZmGz2VixYgWrVq3Cbrdr20tKSqKoqAgY/NEyffp0rFYr8+fPZ+3atSxbtgyDwUBbWxt6vZ5bb72V6667jvj4+I98vQwGA1dccQXPPPMM9fX1XHDBBdpzEyZMID4+HkVRmDlzJlarlYULF3LZZZeRkJCA2WxmxYoVXH755SQlJZGQkMCMGTNYsmQJsbGxrF27lgsuuACTyaRt0+l0snr1atasWUNMTMywdfR6/T938oUQ4iwxNHFHZWUlxcXFpKamcu+995KamsrKlStJTEzEarUSDofp7+//VLmjxJlNAlNCCCGEEKOUoigsWLCAG264galTp1JfX49er6enp4fMzEzWrFlDV1cXLS0ttLW10dXVRX19PV6vlzvvvJPW1lZmzZrFzJkzueaaa1i6dCk9PT2sXbuW+Ph4LrnkEqxWq7Y/vV7P9OnTmTZtGuXl5cOGyaiqSn9/Py0tLTz77LNMmTKFjIwMvve976EoCllZWUyZMoXc3FwWLFjA2LFjmTZtGldeeSUOhwOj0cg111zzscdrsVi4/fbbaWlpYdGiRYwdO5YlS5ZgtVrp7u6mqqoKn88HDAazJk2axPTp0wdn/mUwMW1VVRV33nknF110EYcOHdK2HR8fz6pVq0hLS+OCCy5gwYIFjBs3ju7ubm6//XYMBgNerxdFUfjKV75CTk4O3/72twmHw5x33nlkZ2fT19enDdXX6XTcdttt+P1+AoEAd955J3l5edo68sNKCCEGqapKV1cX1dXVWpCqoaGB999/n/fee4+YmBimTJlCR0cHdXV12sRY4uwhgSkhhBBCiFFKVVV27NjBunXr+Nvf/sbSpUtZsGABxcXFKIpCWlqaFphqbW2lq6uLurrBGXELCgooKSkhFAphMBgIBAI8/fTT1NbW8uGHH2qt1EePHgUGAy033XQTP//5zwmFQnz961//yPwtCQkJ+Hw+otEoO3fu5O2339ZmFXr//fe1XkNvvfUWY8eOJT09ncTExGFDM7xeL06nE0DrhRSJRKirq2PXrl0cOHCAuLg4AMLhMF1dXScFpvLz8ykuLtbWt9vtFBQU8Oijj/Liiy8yf/58bX87d+6koKCACy+8kE2bNlFcXExCQgJ5eXn09vYyadIkZsyYgdFoJCUlBUVR2Lx5Mx988AHbt28nEomwYsUKrdeX0WgkKSmJTZs28f777xMXF4fRaNTWOZtzXAkhxCfV2tqK3++nvb2dgYEBKisriUQipKenY7fbcbvdVFVVceGFFw5rMBFnB8kxJYQQQggxSqmqytixYwmFQuTl5XH06FFSU1Nxu93ExMSgqip9fX1MmjSJsWPH4na7Wbx4MdXV1ezatQuv10swGKSyspL58+drQZ0tW7bwhz/8geeff17L6TS0rbFjx+L1esnKyjrpx8Hy5cuZPn06+fn5/PWvfyUSiWjTbkejUaLRKF1dXfT09JCamkpNTQ3vv/8+BQUF9PT0UFNTo22rurqaa665hptuuonk5GT27t1LKBTSWsojkQjRaFSbnru3t5cVK1bw5JNP0tPTQzgc5sUXXxxWvlAoRDAYZNq0aaSnp9PU1KQ95/P52LdvH5deeinPP/88Ho+H5cuX09XVhdfrxeVyEQwG8fv92usxlJw3NzeX9vZ20tPTiY2Npauri2AwSHl5OTfffDPhcJgjR47g9/tPmdBXCCHOdW+++SYAbW1twGAdAPDBBx/w4Ycfar1Md+zYIYH9s5DMyncOk1nZzm1y/oU4u8jsQ+cml8tFbm6uFphpbGzEbDYzZswY3G43Ho+Hzs5OxowZQ3d3N11dXYwdO5a6ujr8fj8lJSV4PB5qampISUmho6MDVVWZNWsWl1xyCT//+c+HBVKsVivFxcV0
d3fT398PoAWekpOTSUtLIxAI0NjYyMDAAGazGZfLRXt7O3a7HZPJhNvtJjY2FpPJRHt7OzqdjoKCAiwWC+Xl5QQCAW1/DoeDoqIiBgYGtCEeqampNDc3ExcXRygUwuFw0NXVhclkoqioSAsAfZShbXo8HqqqqrSeXGlpaaxatQqPx8PTTz8NQHZ2NgkJCVRUVOD3+xk7diyBQACPx4Pb7cZqtdLV1YXFYqGwsBC/309VVZX2o8lgMFBcXIyqqlRUVGAymbR1Pk9Stwsh9aIQZ4rTqZMkMHUOk8DEuU3OvxBnF7kAF58VnU7H1KlT6erqora2dqSL84VJSkqisLCQPXv2DAuOjUZStwsh9aIQZ4rTqZNkKJ84Y6mqSjQaHVVdNXU6ncy6J4QQYlSLRqPs3r17pIvxhevo6KCjo2OkiyGEEEKccyQwJc5YoVBIy2sxWiiKIsn4hBBCCCGEEEKI0ySz8okz1micRllV1VHVw0sIIcTo5nK5GD9+PDqdDpvNxsyZMykuLtZ68BYUFDBz5kxsNtuw9dLS0pg3bx4JCQnaYxkZGcybN4/4+HjtsfT09JOWAygsLCQlJeUzOw6j0cjMmTOHPTZu3LiTyq0oCjNnzsRgkLZVIYQQ4mwhgSkxKg3OhNNDU3MzAwOekwJCweD/zdpz4npDy0UiEVpaWnG73drjqqoSCAS0RKgAvX19uN3uj02kKoQQQnzRFEVhyZIlfOtb38Jut3PPPfdQUFDAmjVrWLJkCZMmTeLyyy9n4sSJ3H333dow84KCAu655x6ysrL49re/TXJyMmPGjOFrX/sa2dnZ/Md//AcJCQkUFRVx9913k52dzbe//W0SExO1fc+aNYv8/HwtIGYymbDZbCQlJWEwGEhISDhlD2K9Xo/ZbAbQgk4WiwWbzcYll1xCQkICTqdTe34owBYbG0tsbCx6vZ5LLrmExMREYmNjte06HA5iY2NRFAWbzaYNqx+67XK5tO2aTCbMZjMOh2NY2Ww2G4mJieh0g5fHTqeTuLi4YdtKSEjAYrHgcDiIi4s75f4BYmNjT9q+EEIIIU5NmpvEqKSqKm+8+Tbx8XEUFRZw5EgXSUmJqKh0dnTRPzCATqcjIyMdh92Oz+ejp7cXp8NBT08vkyZNwOPx8sZbb5OQkEBBfh7RY8GoquoasrIyMZtMRFWVSCRCV1c3SUmJ+Hw+xo8fh/2EFlwhhBDii6aqKq+88gqzZs3CYrGQlJTE73//e0pKSrjmmmv4/ve/T1lZGUlJSdx9993odDoikQgrVqzghRdeYPfu3QwMDLBo0SIyMzN5+umnOXDgAMFgkHnz5pGXl8czzzzD/v378Xq9LFy4kHXr1mn7NxgMXHvttTQ3NxMTE8OcOXOIRCIYDAY6OztJTEzkhz/8IT6fT1snOTmZyy67jGeeeYaf/OQnfOc73+HGG2/klVdeITs7mxtvvJHc3Fz+v//v/2PWrFk0NDSwcOFCpkyZgk6n49lnnyUuLo7rr7+ezMxMHn74YaLRKGvXriUcDrNnzx6KiorYsGEDqqpy0UUXUVZWxpw5czAajbz66qtkZWWxaNEidu/ezTPPPANAUVER119/PV6vl3379tHX18eyZcsIh8NUVlby3nvv8ZOf/ITy8nLy8vKoq6sjLS2NRx99lFAoxKWXXkooFGL//v10dHSwevVq2tvbeeqpp+jr6/vC3xtCCCHEaCKBKTFqhUIhopEI7R0dlB44SEJCAjFOJ+UVFWRkZJCTncX+/aUkJibS09OD1Wpl9+692O020tJTsVqtJCclUViQz5HyChobGomLi8Vqs2G326moqKShoZGsrEx0Oh09Pb3U1deTm5MjgSkhhBBnhKFewF6vl61bt/KNb3wDVVUJh8MEAgESEhK4+eabef7557XewHa7nd7eXgC6u7sZO3YsNptNC6B0d3eTnp6O3W7XHnO73eTm5mr7VRSFK6+8kra2Np555hmuvPJK3nvvPfbv3883v/lNfvvb3/Ktb32LxMREGhoatPU6OjpITExkwYIFeL1e5s+fj9PpxO12097ezh/+8AeuuOIKiouLcTqd2Gw2FixYwI9//GOCwSA6nQ6Px8PDDz/MuHHjmDp1Kunp6XR2dtLb28v555/Pyy+/zPnnn080GmXPnj1cffXV7Nq1C6vVypIlS2hpaaGyspLnnntOK9fKlSt57rnnOHLkCCaTie985zvcf//99Pb28qMf/Yh9+/bR1dXFb3/7W3784x/z3HPPkZ2dzcSJE0lPT6erqwu3283555/P3/72N8xmM1VVVcOCckIIIYQ4NRnKJ0YlRVEYO6aEJUsWM27sGNJSUyksyEev15OdlUVBfh79/QOkpaXh7ukhKyuTcePGMHXqZBITE4mPi8NkMjEw4KG2rp55c2dTXFxEdnY2RQUF9PT0oNfryc3NIS0tlfS0VBITE4h1uSSHlBBCiDOOqqocPHiQxx57jPr6enbu3ElWVhb/8R//wYcffkh1dTV2u5309HRKS0u58MILSUpK0noOHThwgGXLlpGcnMz8+fPZt28f+/fv58ILLyQ5OZkFCxawb98+bX+KolBbW4vZbCYvLw9VVfF4PIRCIQYGBohGowSDQYxGI3l5edoQt3A4TFlZGWvXruWRRx7h6quvprS0lEgkgs/nIxwO4/f70ev1wOCw+3A4THp6OhkZGbhcLoLBIMFgEL/fj06no6uri97eXo4cOcLzzz/Pvn37KCwspKioiNLSUrq7u+ns7GT//v289tprRKNRjhw5MiyXZU9PDzk5OcTHx5OWlobP5yMnJ4eUlBQikQjBYBCPx0MkEsHj8RAIBAgEAtr++/r6tP3X19fz+OOPs2DBAoqKir7Q94EQQggxGkmPKTEqKYrC9OlTgcGEqSsvWv4PlwfISE8fdv/KKy7Vllly/qKPXVdVVcaUFP9zBRdCCCE+Q6qqsnPnTi14M2vWLJqamtiwYQNLlizB7/cza9YscnNz2blzJwsXLuQvf/kLLpeLO+64g507d1JaWkpZWRlXXnklt99+O1u3buXw4cNUVFRw+eWXc/vtt7Njxw4OHjyo7be8vJytW7cSCoWYNGkSNTU1dHd3EwgE2LNnD6qqcuDAAfx+P3fccQc//elPCQaDAHz44YeYzWaOHDnC22+/zbZt2wiFQuzevRtVVamtrcXr9VJaWkpvby+PPPIIa9asIRqN8txzz7Fr1y5CoRCdnZ2Ul5dz6NAhrrjiChYuXMhbb71FIBDgzTffRFVV/H4/Dz/8MJdccgkAr7zyCtXV1VqPsSHr1q3jqquu4uabb+att97ikUce4YorrsBoNPL444/jdru1wNy+ffvwer10dHSg1+u1/S9atIh33nkHl8vFBRdcQFNTE/X19V/I+0AIIYQYzRRVun+cs/r6+nC5XPT29hITEzPSxTmJ3+8floR8tLBarVri1DPZmX7+hRCfzNBnWoiPkp6ejk6no7GxERhseDnxMvB0HztdVquVwsJCDhw48OkK/QX7Z471TCJ1uxBSLwpxpjidOkl6TIkz1lBPJSGEEEL
885qbm4fdP1UA5nQfO10+n2/UBKXgnztWIYQQQnw6EpgSZyyDwTAs/8NooNfrJaAmhBBCCCGEEEKcJglMiTOWTqfDYrGMdDGEEEKIM5rT6SQcDmtJuoUQQgghRpMzPxGOOGcpijJq/4QQQogvQkZGBv/93//N1KlTWbt2LUaj8SOXnTdvHunHJgERQgghhDhTSI8pIYQQQohRasWKFcTGxhIMBunv78dqtXLRRRdhs9l49913Wbp0KWazmTfeeINrr72Wzs5OHnroIVpaWka66EIIIYQQgPSYEkIIIYQYtaqrq9m9ezfV1dXMnz8fh8PBzTffTGNjIwkJCUyYMIHdu3fT399PZWUl7777Lp2dnSNdbCGEEEIIjQSmhBBCCCFGKa/XS19fHwMDA9pjhw8fZvPmzVRUVPD+++9z1VVXUVhYiMfjwe12EwqFRrDEQgghhBDDSWBKCCGEEGKUCofDWsJzn89HNBqlp6cHVVXJzMykoKAAn8+HxWKhtbWVyy67jNTU1BEutRBCCCHE/1FUVVVHuhBiZPT19eFyuejt7SUmJmakiyO+YHL+hTi7DH2mxbnFZDJhMBjwer3ExMQwMDCAw+Ggr68PRVFISEjAaDTS1taGTqcjNTWVtrY26TV1lpO6XQipF4U4U5xOnSTJz4UQQgghRqlgMKj1mOrr6xv2X1XVYfmkotEojY2NX3whhRBCCCE+hgzlE0IIIYQQQgghhBAjQgJTQgghhBBCCCGEEGJEyFC+c9hQerGhLv/i3HL8UA8hxOgnn2UhxBD5PhBCPgdCnClO57MogalzWH9/PwBZWVkjXBIxkvr7+yUxpBBngaHvdCGEkLpdCKkXhThTnE6dJLPyncOi0SjNzc04nU4URRnp4ogvmKqq9Pf3k56ejk4no3qFGO3kO10IIXW7EP9H6kUhRtYnqZMkMCWEEEIIIYQQQgghRoQ0pQghhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhASmhBBCCCGEEEIIIcSIkMCUEEIIIYQQQgghhBgREpgSQgghhBBCCCGEECNCAlNCCCGEEEIIIYQQYkRIYEoIIYQQQgghhBBCjAgJTAkhhBBCCCGEEEKIESGBKSGEEEIIIYQQQggxIiQwJYQQQgghhBBCCCFGhGGkCyCEEEKIL040GqW5uRmn04miKCNdHCHEaVBVlf7+ftLT09HppF1ZCCHE2UUCU0IIIcQ5pLm5maysrJEuhhDiU2hoaCAzM3OkiyGEEEJ8pqTJRQghhDiHOJ3OkS6CEOJTks+vEEKIs5EEpoQQQohziAzfE2L0ks+vEEKIs5EEpoQQQgghhBBCCCHEiJDAlBBCCCGEEEIIIYQYERKYEkIIIYQQQgghhBAjQmblE0IIIcRHUhRFpqcfJaLRKKqqavfl3I0eqqoSjUZHuhhCCCHEiJDAlBBCCCFOyWQy4XK55AfzKKHT6ejr6yMQCKDX64mLixsWqBJnLp1Oh9frxePxjHRRhBBCiC+cBKaEEEIIcUp2u53e3l6CweBIF0WcBoPBgMvlIhAIYLPZ8Hq9eL3ekS6WOA06nY6EhAS8Xq8EE4UQQpxzpH+3EEIIIU5Jp9MRiURGuhjiNEWjURRFAeTcjTbSK1EIIcS5TAJTQgghhBBCCCGEEGJESGBKCCGEEKOGzWYnJiYGo9H4sculpqbhcDi+oFKJIVar9Z9a32w243K5sNlsn1GJQK/Xa+Ua+m8wGDCZTNpjBoORmJjB/Q71OhNCCCHEF0NyTAkhhBBiVNDpdNz7L/9CwO/HbDHzv3/+M319fVgsFvr6+jCbzeh0OuLj47n33n/hoYf+SENDAxarlb7eXoxGI3q9nmg0isFgwGA04vN6sdls9Pb2YjAYsNvtDAwMoCgKJpMJg8FAb28vOp2OmJgYPB4P4XCYGJcLr8dDKBQa6ZfljKEoOm699Ss88sjDWhJvu92OotMx0N9/LABkIByJ4PN6tfuRSETLhXXpZZdRkF+Aisqe3Xt48803cDqdeL1ewuEwVqsVk8lEb28vADExMQSDQUKhENFoFKfTSX9//7BhjPPmzeOCC5bxk5/8hC9/+Wb+9rcXmDRpMjk5OTz55BN85bbb2bJ5M1ddfTVtbW10dnTw9NNPybkVQgghviASmBJCCCHEqKHTKTz99FOsuvhixo4bx/Tp0wmHw+zbu5cJEyYSGxtL2eEyYlwu4uLiWHvJJYSCQWrr6kBVmTlrNhs3rOeSSy+jp6cHvU6HTq/j9Y0biUQinDdvHgP/P3v3HSfXWd79/3PKnKm7s13a1WrVLVlyly1hbIMb3UBMMc08xoAhxoEAoYXED4RXnjyQ5AmvXwg9oZhggyGAiTGhGONuYWTJki3LVtdqi7ZPnzn198eZc3Zmi7QyVr/efo21O+XMfc7MrPZ8dd3Xncvz3HPPcsWVVxI1onzve7dx3vnn09Lcwt69ewGPRYuXoCjwzW98Q5rDVykKpNPpsOJo5cpVvPZ1rwUU7r33t1xx+RXYjk1rSyvf+MbXede7bqR/oJ8lS5byd5/9DI7jEI8nuPd391Iul7nqyqvYuXMHr3jlK0kmkvzoR3dy880fYGJigie3PI
mu6fT09NDc0sJ9v7uXRYsW0dDQSLFU5Hu33YbrukQiES66aB0Hhw6ycuVK9uzZzdq1a1m95ixaW1pYs2YN2awfcm3c+Ef+68c/5n3vfz9nnnkmW7ZsOY5HUwghhDh9yFQ+IYQQQpw0Ghoauf76d1IulchMTLBwYQ+ZiQwLurtJJBL85Kc/YdMTT/Dk5s0oqsq+vfv4j//4D84++2xSqQbu//197N69m/7+Pv7rxz/i4NBB7rrrLpYsWcrSpUsZGRlh5cqVxOMJHnvsMX5//+9ZvnwFy5Yu45vf/Aa/+c1vePGLL6GQzxOLxUgmk8f7kJxwgmBq3fp1/Op//oef/Nd/sX79ejRN447bb6dULrFs+XKGhg7yi1/8gkhEDx+jaSove9nLufnPb+Znd/2MxUuWkM/nmd/ZSTrdxP7e/dxxx+0sWbKEVatWcdddP2P3rp00NDZy4UXryGQzNDc3h1M9zzzzTJavWEFTuonXXPMaNm3axEte8lJKpSJbt27lTW9+M3/YsAHwV8LzPK+uibwQQgghjj6pmBJCCCHESaO/r49vf/tb5HI5kskk+/fvI2JE2PjERpKJJONjY+h6hEKxwM4dO7j8pZdz47vfzZYtW/A8j4MHD+I4DtlslnKl4v9ZKlOulFm4sIfBwQHyhQKVSgXHcXA9F7NismfPHm666X3s3buHhx9+mEWLF7Fv3z6y2ezxPiQnFFVTuel97yObzbLpiU286tWvxrEdfve7e1n/ohdhWRaZTIYdz+1g5cqVvOlNbwIUPM8PhnK5HPf//na6Fy7k7LPOJpVK4rkehXwe27bIZXNUKhXyuRyPPvIIb3jjG2lv7+B/fnkPj/9hA8lkku3bt1OpVFAUhbVrL+TfvvSvDA+PcN1116EoSnVsT3Dw4EEuuOACdu/ezcqVK7noonV0L+hmbHyMbdu2Hd8DKYQQQpxGFC/4TUAIIYQQp7xsNks6nZ7TfVtbW5mYmKjr13O8xeNxyuVyGGREo1Hi8TjZbBbDMMJAwjAMyuUysViMWCxGptpjynEcXNclGo1imiaRSATbttE0DUVRiMZi2JY1rb+Q4zik02ny+Ty2bZNOpymVy1TK5eNxGGYU9NcaGRkhnU5TLpepVCrHdAzJZBJdj+B5bhgeKopCLpcjFotRqVSIxmLomsab33wdDY0NbNu2jd/+5jeA/3rato3necRiMUzTJJVKYZomlUqFSCSCaZoYhsHZZ5/NhRdehBE1uO273yWbzdLY2Eg+nw9fv3g8TqlUqtu2YRiYponruiQSCQqFApqmkUqlcByHQqHA8fj1uL29nZGRkUM+dyaTobGx8RiOSgghhDj6JJgSQgghTiMnezAlZnciBFNHQtM0VFX9k5qMG4aB4zinxHtUgikhhBCnK5nKJ4QQQgghjrkXIlCSxvNCCCHEyU+anwshhBBiRp7nSRPok5S8dkIIIYQ4WUjFlBBCCCFmVKlUaGxspHwC9VESswt6bIH/2jU0NKCq8m+QJwNd18PeWkIIIcTpRoIpIYQQQsyoUCjgOA66Lr8unAwqlUrY6LtSqeB5HoZhHOdRibmwbZtcLne8hyGEEEIcF/KbphBCCCFmZZrmKdFY+nQw9XUyTVN6MAkhhBDihCfBlBBCCCFm1NXVxcc//nGampqO91DEHBSLRb74xS+yc+fO4z0UIYQQQog5k2BKCCGEEDN6z3vewz333MODDz4ovW9OAueccw4333wzH/vYx+T1EkIIIcRJQ4IpIYQQQsyopaWFZ555RpqfnyR27NhBQ0PD8R6GEEIIIcQRkaVahBBCCCFOAVIlJYQQQoiTkQRTQgghhDihqKqKrusoivK8Hjv1a0VR0DSNaDQ6p23EYrHwuRVFQdf1uu0GZrteCCGEEELMnfw2JYQQQogTyic+8Qn++Z//mX/+539m1apVgB8CBTRNCwMhVVXRNC287brrruOss85i8eLF/M3f/A2apvHe976Xiy++mJtuuolIJBLetzb8Crajqmpdw/cLLriAr3/963z+85/nta99Laqqhvd9xSteQU9PT91Ypm5XCCGEEEIcmvSYEkIIIcQJQ1EUurq6+Nd//VdWr17NNddcw7Jly7jkkkvYuXMnv/jFL/jzP/9zkskkX/ziF/nQhz7E2NgYX/nKV7BtmwMHDvCKV7yCsbExrrjiCn70ox9x7rnn8oc//IGrr76a1atX8+Mf/xjXdXn5y19Of38/d9xxBx/4wAeIx+N8+9vfpq2tLQyZWlpa+OMf/8idd97J5z73OQ4cOMBb3/pWhoeHGRoaoquri9e//vV8//vf5+1vfzu7du3ikksuYffu3XzrW9/Cdd3jfESFEEIIIU5sUjElhBBCiBNKKpXik5/8JO95z3v45S9/yQ033EB/fz+XXHIJCxYsoFgs0t3dzerVqznjjDP4wQ9+wNve9jb+7u/+jl27drFy5UouuOACvvvd7/L+97+frVu3Ui6X2b59O9/61re4/PLLueGGGxgaGuKCCy7g1a9+NcuWLaNcLnP55ZdPG49pmoyOjtLf3097ezvLly/n+9//PvPmzcM0TTo7O7nuuusYGRnhne98J/39/bz4xS+WRuRCCCGEEHMgwZQQQgghTigTExP8/d//PVu3bqWpqYmnn34aXde5//77WblyJZFIhPHxcVRVpb+/n8HBQb797W/zt3/7twwMDNDb20s2m+V//ud/WL9+Pffddx+e55HNZslms1iWxVNPPUUkEuGRRx5h06ZNFItFxsbGeOSRR7BtOxyL53lccsklfPzjH6elpYUtW7bQ19fH0NAQjuNg2za//vWv+bM/+zN+/etfh2P9/e9/Tz6fP45HUQghhBDi5KB4soSLEEIIcdrIZrOk0+k53feLX/wi/+///T8OHDhwlEdVb+HChQwMDBCPx2loaGBiYoJVq1bR39/PxMQEZ555JrlcjqGhIdLpNAcOHKhbka61tRVFURgdHWXp0qXs3bsXXddpaWlhbGws/PPMM89kZGSEvr4+enp6SKfTbN++nY6ODgYGBnAch8bGRpYtW0apVGLv3r2YpsmCBQvo7e1l3rx5ZLNZHMdh4cKF7Nq1i0QiEY51cHDwmB63dDrNP/3TP/H+979fVug7RWUyGRobG4/3MIQQQogXlARTQgghxGnkZAimxPMjwdSpT4IpIYQQpyKZyieEEEIIIYQQQgghjgsJpoQQQggxq2B1OnHiUxTleA9BCCGEEOKI6cd7AEIIIYQ4Me3YsYNrrrmGX//61zI17ASnKArr169n//798loJIYQQ4qQiPaaEEEKI08iR9JhKJBK8/e1vZ9myZVKNc4LzPI++vj6+973vkclkjvdwxFEiPaaEEEKciiSYEkIIIU4jRxJMCSFOLBJMCSGEOBVJ4wghhBBCCCGEEEIIcVxIMCWEEEIIIYQQQgghjos5NT93XZf+/n4aGhqkx4QQQpzgPM8jl8vR1dUlK6oJIYQQQgghTmhzCqb6+/tZuHDh0R6LEEKIF1Bvby/d3d3HexhCCCGEEEIIMas5BVMNDQ0AqLpxCldMKdVL9
TtFoaGhgY72DuIxA9fxsG0b13PrHuV5Hp7r4XoQ0SM0Nzf7x0tRoNpX3nVdPM/FdixyuRzj4+N4rothRNF1HU3TUDU13J4CKKqKoij+iBQFRfH876vHX1XUGV+LuuuU+u+DryevU1AUfz6novgXXdeJxxNEY3Hi0RiapqHrOqquoSoqmqqi1lyU4IGex4xd9D21fhyeh+M4OFN77isKVLfguf62FEXB8zxcNzjmHq7r4bmOf73n+Utie+D5/6u+hMFxC46nUh0D1e054VLawbjqtlM9Nn6liYfruLjh/UFRVFRV8+9fsx+q6u+rf3+X8P1UvY+q+cfM8/xb6vZthtdKpfpemHK9oih4ilc3Vv96f2yg1O13eJ+a5/E3WX8M/Ju96nj9Y+tfqsex+noEz6uioFa35br+viiKgue6eK6Hpmuo1SFMf2/Uj73urRAc6FkEbxUP0FRQ1Oo+VA+sUr2D5/n3tW0H0zQ566w1XHrZS2hvbSMSiaApKp7nkS8U2LlzJ5s3P8nw8DDZXJaIptPR0YERNchksoyPj5PP5ylXLBzXw7JdLNsml83TN9DP6Pg4lXIFt3rcgh2Zvt/BsZ++34dypPf1PI9SIRf+7BZCCCGEEEKIE9WcgqnaQONUDKZqQ4rgBFzXdJLJJPF4DE3V8DwbVdNQvCn774GDg+d6xGIxUg0NxGKxmpN6D9txUPBP6F3HrT6fQiQSQY/oaKqGogYBiX8yrSoqiqrUhCq1IYIShlbTgqi6MGKmMGrK64mCglfdvr/tSsXEsV3KxZJ/P7X6nPjhi6ppqIoSBjEKs70nlDAoqR2WB7h4YbgQ7HQY1ITJTc2JfRh8TQmiap9Nqd/3cNPh7YDiha9xMMXJf/nDVCs4lHWvCTVhg3/EgulRNUFWNaTzQy5v2rFRVAVVqT9mnheEcNOOnP8sCjXvg2oQpKqgeNP2VQkCTNRqKOKBFwRS/v5NBmnuzO8NPJQgfKoJ6vzXxgt3WVH9caiq/17U1MkQzg9ivWqIOSWWCn6GVF/z2p8pswWtSu1jp96uBsd98rVQVX/7QYgcibi4rsvY2BilYpH4wjiJeAJN0UCBWDxOuVxmcGCQSrmE69iYpomHRzwWBw9cx8ZzHRRFxbJdNM1D13U8FxoaGskXiliWjec44eviTh1rGEgFfx69YOr5PE4IIYQQQgghjoc5BVOnsuBE3aupEtFUjWg0SiwW808+wzDJP7kPTiyphk1+BqFgGAZGJIKqqjjOZFWOqqrguViWRblSwXFcNFVF0zU0VQsrj/zAhMmqlikh05SRMyWBmqyICa4P/jhEODV5HOorimwUFMcJnykIjJSaSilVVScrumpGFWxwsq4mfFY/0FAUHM8N7xyGU9XXIazOqak+CQO6mn2rC31mOQH3av4fVNL4+1ETDFQDmLoAqmabtbVgQUCkKJpfoTTDGGqrt2a6PQjQlGpcWXfgakILTQFNU1FdJTzG7pSKKW/aeP0wcDLQDK6bGlp6U4KS4GoPaqoCJ9/r4Hlu9X2iVF9VBc9TJgNB16tWu3nVbz1UN3huarZZE1QqtWMLjpZScxw8vOCzgF8F5oed/t1U/ACo5pH+uKqvZ1DfpSgK4+MTHDjQR8/CRcRicbRqqaCu66QbG2lvb+Pg0EH0QoFyuUyxUCCVTBExIkSjUYxoFMuycVwX1QVN1YhEIiQTCWLRKKVSCduxEUIIIYQQQggxd6d9MBWomyKnKiSSCeLxeFjFMxkA+FUo9YGRgq5rRAwDTdMmp7dVT7Q1VcVxPGzLwjQr4Hl+9VU1gPDzkMlgRvHAU2uqU4LkIAyaZh57zXBmvC0IfCbv5u+HH6wFIYcSTscKnigIi8DDc93qlC3/WAUDqgumglDK85/Fq/leUQBVmTbFKawgqtmFuoAnHHdNBU7NH3OqC1Fm/HIy4KmpFJptLEGFjx9MHSo4pL7iK3jO8KWsfaxSc1ttNZxXnWaphK+FUv3eU7y6aaWToZc6GUxVHxC8z4LKt7CyKJgOWvv28vx6tskxKuH+B9NWFVWpCyeDr93qFD5FIawOU8NxTR4HNRwX4fYnj2s18KoJ0pTqZzLYUBDaBtMIFYLqK6oVZsEUTCfcMQ+FUrHMnj17WX3mGtJNaTxt8njGYjE6Ojpo7uujkM9TLBYplytUKmVi8bgfVEejmJUKlu3gqi4u/nTFeCJBIpEgk8uBRfiZnvour38PCyGEEEIIIYQACaZqKpUUPA9cz0VXdL8Hjab5vXM8l/rpSEF1UTXUURWi0SjxWAxV0wD8k2ZNC/vu2I5LxbJB0dAiCpGIURde+WOhrjrE89zq7crkCXl4+2TlyWxBFNT3QKr9fnIr4AcRkzGM63goihtuP6gkmrKladU4YXQXVJd5StifiNojqCgzBDb107SmTiULq4tqHhgGXlMipMm7zbzvNU9SM5WwZjzK1NunVkFR80XtVLMp2w4Dufr3zuS2vTAcCqbzqYo6OcWQyWmNyrQNuPWVXDXBFLXTTWsDIb8ZE8FrF/YIC/YifDtNBlPhe6ymqkxRgiDJH6umqWFIGb5e1fdpGDaFIVL9cQ6qn+qDqdr71IwzfF7/86WiTAZganA74fHz+3d51SmnChXLor+/n5HREbq6OtE1Ha267UgkQktLK23t7YyOjhKPxzErFUqlUnXKbcT/maDraKqFrfhVYaqiEDMMUqkUiWwW0zRxq33ipr0nZr5CCCGEEEIIIU5rp30wFUYYNdVCsWiMZCKJrus4tu3fxaMu5wiCEs910VSdeLWqQq0JuhT8jMD1PCzbwrJtFFVFVxU0XQtyqerJuBsGUmHo44VJAUGUNHs/J8L9qJ+eWBPI1PzhN8f2ws3XzGqj5klnexLCu9VUdE1WfgVPpNRlMm7tWGbabO1XNWHQTP2kwn0NwpJp25r+gGCa2bQnnLGK6RCPq30Q4ctTt83JsVMX6k0N38LwB8KqpiCIxHP9cKpmbJOVUPVvyMnXeuqz1I54poCzOtUt3H71fVEbOoYVe2EkF24jrJiquX3qgVWD51Prg6ep4ZVSc79gE/VN9oPrFDSlGkhpwe2TQwyqFsPPoeKHVKVikWKxyL59e1mxYgVG1Ag/p7quk2pI0dbaSm8iQbFYxLYtSqUisXisOpXPIKLrfhinuLjVgEzTdBKJBKlUinyhgGXbNdNRp7wGdW8WIU58Z5xxBh0dHUf8uFwux5YtWw75M18cPR0dHZxxxhlH/DjHcXjiiSeoVCpHYVRCCCGEEDM77YOpMLOpnkhqmkY8ESeZSoYnt0EsMa1mqLoSmWH4wZQRjaKoKp7jhKu4gYLj2FQqFb+hsudWV+CbTIkmtz4ZxARNsoPKm6DXztz2yZus2AnDmykhFUz2z1aYHA8eeJMrBIaVR1OLjcKG5fWrz9UFMih11TvBimW14dmc9if8/+Tz1I4v7M112BP+6gSrmvCsfhxBQDTjw2qfuk79Uz+f0KFm5cAgPKJaMVUN3iYr1GoHNdMxrA8VJ6dIBjsxudpjGJzUVbS54XiCkKx+xcNqFVM1uApCocnm+TXb
qqkcUlBArfammhpwqZPVXkGQFDyPf91kEAZK2HhfC6qpVPz3oecHWbqu1zXsV1R/mqFpVnBsm2eeeZZ1F60jlUqha/62NV0nHovR0tJCQ0MDE5kJVAVMs4JpVojHosSiBoYRQdd1bNtFdV0/mFIVYtEoyUSCqGFg2XZN2Fn7ulB3HIU4GezcuZNdu3Yd8eOO9Oe8eGENDw8zOjr6vB7rBNOghRBCCCGOkdM+mAL/F2hV1QAFXY8Qj8dRULAsqzodKDw9n3wMwVQhiEajRGMxv1IDv0H1ZD8nD9u2KJfLmKbpn6zrCq7nhb/8+Xerqf5RwFXc6i/21KUhM01RC/Zh2p81069qm7sHFKqFN1Oqb/CqtVQznljUVsXUnn0rNfsQJFPVUKQarHj+XWatgKp9ikOuJlazX2EYdoj7zfgENV/VVzUdYmw14VN9ry5qQqXZh11T7xQ804xJ12SFkld7z/rbpj0mGMnMgw8qAoPKu2BqXW3x2+RzVd+ETv0WJ6eBEoZn/nZqr5/6nPX7NRkX1VxXu0+11Ws1j5m6s2EVlhKmt4DnB0W6/1n2qr3hFKX6ebNt8Fyefupp+vr6aW9vxzAmp9Rquk5DYyMtra0MDw9RiOhYlkWlXMZKJIhEIsRiMQqFUrgaoaKArmsYkQjJZJJkMkGpUsZxrMlwVpIocRIL/p4TJxev5ncMIYQQQogT3REFU8GJ4Kn2r6C1U6miUYNYLIbr1TQ8n9Zfyb/O9UDVdGKxmL8aX9ArKNhmNVhyHAezYmKZVnUKn4LnetiejesGgZEbVn0EIYeHF94+k9rwaTKsmT69q/Y+/rc1p8xBaFRTMeVVK6bqQ5rqCfaUahfC6Vg1FVZTj1cQsKk18cch3kNBc+9Zbq0Pkw47NXB6KOA/fvI1n9xXtzZPm/X5p37lBf9TarKkWUY0081T9yHsUVT9X1h1pEwef8+rPVmsG8kMzxEEjcGtflVccBynRSeKSlAA5VXv5U9nrEmywmdx63Z6yqy/ujEEVVWKUrtX3pSD5k55VM3YpgWNk9sIqzM8F8Xzj6Ft27iOU/0cObiOi66pWGaF7duf5YwzVpJIpsJtqapKKplk/rx5DAz0USjksSzbD6cqFSLJpB9CRw1My8Z2rLpwKh6P05BqIJvPUypZ095Gp9ZPTSGEEEIIIYR4YZz2FVNhfyTPQ9c1otEomqbVTIebPDmerBwJH42maui67jc9r5kiNRngeZgVk0qljOM66Kjgges62I5TV/FR9y/TYQbghc85tSJlcizeDCftM/CmfxvsV3iTVw0iwmtqAw3/zyBUUL3JoKFu83X3r27fA9za8pzZ05/JiqvJuxuGga7rlEolFE1D1zQsyyJR7QcEhF8fLjitHWcw5XK2YxeNRjErJrF4DKf6ehmGUa2kmezBMXuF19ziCK/mdQaIGAa246DrOqZlEjUMTMsiGjHQNQ0Fj1KpVJ1SWRs6zvaMypT3SE2YhII600PqXsfgDTlZ4eQFH5Ca6ZoeXl0lVvgcU9/P095fMwt6XoXbn9YvLagIdHEcF9dxcB0Hx3FwbBunOq3WdR3wQNMUSqUST2zaxMUXX0xTUxOaGvWn/qkq0ViMpuZmmpuaGRsbpVQqUalO53NiMSJGhGg0SrliYls2jq3g4ldl6boWruZZqZSk0kQIIYQQQggh5uC0D6aAcMpcJOJP41NVNezNNBM/unFRFDCiUYxoNFzdi7Dhsl/d4rkeplmmUi7i2RYV1wHXr47yt6HUjCFoej7lxN2/x/RxKDNME6pNBGaa3jVDODX55ywJwZTQJai+coPHBAlX2Edn+rGbzCRmb98++TT199A0jTe88VoW9fTw7W9/h7POPov58+dTKpXoaG9nz969GIZBV1cXfX193H33PRiG4VfM1IQDk5VWk2OaDLGqAUjNU8+b18EHP3gL3/ve7Vx99VU4js2mTZu58MK1ZDNZ7rvvfiqmydjY2GSz+xmruKYf1xmOTk3vK5Wbbno3m5/cQmdnJw89/Ajvu+k9fP0b3+Tqq6+iq7OTsbFRstksP/3pz3Bdt257c4rCwql9/uvpULvrU2vNlPBa/+1Zu+rc1OmDSv3t4dPNNqoZjk3ddNPJBv214V0wTc91Xb+60fWnrTiug2PZfijluv6xqR4fBXAcPxTevHkLe/fuY0F3N4Zh+FV/qoqq6zQ0NNDS2sKBviiqpuF5YFZMbMepVlRGKRZLVFQNVXVQPCXsu2UYBqlkknKpQKlcnrEnmRBCCCGEEEKISTMWSpwuJhtx+1PoYrEYyaS/Gl9w+4yPw59ip6CQCFbjq/aXCvrVBD1wHNv2e9RY/qp8lmlSMStYto1dvVhWtbKjeiLt1jUIn70GJugdVXchqHea3LfgNtf1/OqRGS5hP6vDmpzS59VeqnUtHp7f5HzWkddGYfX38rzplTbgT4W8555fkslmsB2HJ5/cQiwWpbt7Ad/57m2cddYali1byne+812WL19GPB7j+uvfxsUXr59x9LXzw2pXg6tNU5LJBG95y3WYpkVjYyPgsXTpUvbv72ViIsPDjzzKgu4uXvva13DllZfz539+E+9//0287vWvre7L5DGd6XWquaEalk0ehwULuojH41x88Yvo7e3lda99Da7r8tprrmFgYJBSqcR3b/se8+bPI5FMTr6GMx7vuQvfN/ih6eR/Di4OHg6u5wdBTvUSrBvo4eEp1Qt+aOnWvsJT36eHuDDt/kHAVH0Puw62Y2PZFhWzEi4s4F8sTKt6MU1sy8KuVk/ZjoNp+YHVgQMH2Lp1K4VCYTK8VPxwKplI0N7WTkNDo19Bqar+59c0AYhGDQzDQNNUFFUJG6CrqoIR0Ukl4iTiCTRNZ1pwLIQQQgghhBCizmkdTAUURUGP+L2iotFouDx97fSsuhN+T8F1PPRIhHgigRGJhEvUh72X8AOVYqlEoVjEMq3wBNs0LWzbxrGrU44cv4+N4zjTwrBDnbwHp/21J/C1QdT0xxz+Uv+YyVClZkSHffxU0+qHpu1H7XanixgGH/rQX1CpmCxY0EV7exvlcpliscg117yGvr4+JiYmeO1rX8PIyAimWWHnzl309fVPG0fNpDAUZfIy9bmj0Sh9ff2kUklMs8LY2DgHDx4kFotiGBH27t0HKHR3L6ChoYGtW5+i98ABDMMgqBeb62WS/7550YvWs//AAT9wsSzOOmsNP/jhnZx37jls2bKFhoYGXvOaV2NWzHAa48xHeurez3Td1JEcLtqa+TGHqofyYEqIeZhj4k1+7eLPAHXx/GjMc7EcB8u2MWsuluNPjbVt/7NlOw6262C5LrbrYFf/dFz/MzaRzfLEpk2Mjoxg23bdmHVdJ51O09baSjKR8KvvHJtisYht20QMg2jUQNM0NFX1QylFRVUU9OqqnolEAq26oIKEU0KIE52qzvzrYPC7jRBCCCHE0aR4c+hkns1mSafT6EbslGp+HkydUxSFRCJJe3sH6XQaRVHCaWCe61dTeWEzdAXP9VcqSjc1Ma+jg0QySUTXw4onx3WxbYtSscTwyBCDAwMUioW
aMEZBVfxt1p6z6nqESCSCpk3/BfFQvxjWvR7ezDFBbYP3IzM5lsM+dpbnPpTp21SmXadpKgsXLkRRVNLpRoaGDpLP56lUTObPn09fXx+qqtLV2Ulffz9mxaxvHD6lp9WRTCtrakqTzxdYsKCLiYkJTNNCVVUKhSLxuL8SYywWp1gsomkaqqqSzWanbHOOx6Q6BbK5uZXxTAbDMIjoOrF4nNHRUdpaWxkZHaWzs5NIRGNwcNBf6fGw6sMRf/bdIV7LKTcd/r1zuH2seVd4yiEmc1a3FXReB/+YuA5Bs3fXdf1QN+gjVb0Eny3H9qfxBdP8po5OBbTq9MPly5bxv//2b3nFK15GU3MzaIo/zdayyeWyPPvsdrZseZLBgwfJTEygqCptbW0kEgny+Twjw+Pk8wUqlo1pOVRMk4ppUq6UGR0bp79/kEKpCB5hRdtMx3cujuRzG/yMLuazZDKZasWfEJOCv9PnorGxkb/+679G0zSy2Szf+c53OHDgQN19uru76erq4g9/+AMXX3wxe/fuZWBg4GgMXcxRY2Mj73znO/nGN75Be3s7b3zjG/nKV75CT08PL3rRi7jjjjvC+0ajUT74wQ/yla98Zco/dsBNN93E448/zubNm4H611ocH/JzXQghxKnotO4xFZzsqapKPJ4gFosB/snvzL2CfB7+vyImqkvI1/5L42TPKDDNCqVqlYXr+ifNwbZRwHUnKzWCvlb+MvT6tDAg7D/FzMHKXMPC2u0cnjLD14cONMI18444u5w9OLJthz179ga31gVge/bsCe+3u+brYBz+NLmpfZCmZFW1w50Sro2PTwCwd+8+f5s1DyyVygDk84XD7tYcjh7BCoeZiQlQFEzTpFKpUCj62x8ZHQGU6glfbY1R7ep0M224/jX3V12c+QWacwA6ZftzecGnNnifeQButWeTv1+u6+I6k58b13XDMCoIqWo/r7bjhP2n6jYbXib3b3hoiCeffJJ16y8imUoSUSL+fXWNRDJJW1s7Tc0tjI1PEIlE/NCpXCYei2EYBtGYQblcwXZcVNVF01R0TUOrrtSZSCYoVcq4jhuOwQsOV3CFECe4RCLBypUr+fSnP82rXvUqrr/+er72ta9x3XXXoaoqP/7xj7nuuut42ctexj/8wz/w0Y9+lD179vCFL3yB5cuXc9FFF3Hfffexd+9err76anRdZ3R0lH379tHX18fLX/5y/vu//xvHcY73rp5SisUiL3rRi7j33ntZv349f/EXf8Fdd93FNddcw+joKGeffTZXXXUVmzZt4tFHH2V4eBjP87jsssu44IIL6O3tZffu3fT09NDW1sZVV13FHXfcwVve8hauuuoqbr31VjZu3Hi8d1MIIYQQpwiZyocfMkWjBhHdPzEN+tkE09lq+/d41eBCr67OpVb7Ubko1UtwPw/TtihVKjie33XJdh1M26Jc7YvjOP4S9o7tYFeXpXdsJwyPVKX+5ZktLJt2El7TN2nqlMQjr5jyt63U/H9OlNkvXu1/06YhBqHCIaYZEkwzDPo1Ef4ZXKb2KAouQS8nr2aQ1bgk3PZsGcvhq8FmG7M/HS2cmjaHy9Tpl1P7P01O2STs+3SowOf59XYKepO5h7g4h5w+WndxvZrP1SzP4wTB02QAZdsOluX/GVRLBQFV7Xs/GNNMoZSmqP4UXVVBUVQUFMrlClu2bKG/v59yqYxbndKnqCpaJEI6naa1pZVYNEok4v9sKJfLmJZZXcHTqFY4amiq4k/r07RwEYVELI7CISpMT43CU3Ea6O7u5gMf+ACvec1r2L59O4qikM/nWbp0Ke9+97sZGBhg//797N27l6GhIZ5++mna2tr40Ic+xK5du/jbv/1bFi9ezP/+3/8bwzAAuPHGG1m3bh3nn3++hFJHgW3b/Pa3v+XlL385F198MXfeeSdXXXUVF154IZs3b+bWW29l165d3HLLLSxbtozXvva1LF68mI9+9KM89thjvOIVr2DdunX+zzfNX334xhtvZGBggN7eXvr6+o73LgohhBDiFPK8KqYOVblzslFVlVgsTjyeQNO1uvAD8IOU2hN3AEUhGosRiRooqgqqWo0PlDC4cjyHilnBduywqTK1/XU8D8VToNos2vP8VcUs20LXdTRN8yuQPCUMYg51JhuML5jGM1sANdNth5/apoCihqUuh5rQNWe1U5uCR3uEq7wFQ2ppaQHPY0H3AnK5HOVy2a+UsWyKpRItLS10dXYyMDhIW1srBw8exHU9VFXFdV1UReHg0JA/JRO/eEqpTmPzlKmVRi4rV66kqamJffv243ngODZRI4rjOExMTNAxbx6LFy+iWCySiMfJ5ws888x2YvEYkYhOPJ7ANE0ikQgH+vqO+ISru3sBi3p6eG7HDmzbobExhed5dHS0E4lE2L9/P8lk0g9ITAvXdRkdHa1/z0491DVhm/9au0x9FSffX8H7CPxplZO3z/iWCqvkDi8ch6fUjXSmnmnBe9R1HGzTqn4G/CCsNoAKAq1w6l71fVVTDBeOT6muGOjiL1xgOw7P7tjBs8/tYPmyZcSiEVRNA1VF0TSSySTt7e0kkkmyuSxqqYRtWVimRSRiEDUMotEIpmVh2Wq135RGRHOJGQbJZJJYLEqh4Mw4LiFOFvv27eMLX/gCS5cu5aabbiKVSrFmzRrGx8fp6Ohg48aNYWAxMjLCc889RyKRoK2tjUWLFrFp0yZUVeWZZ57h9ttvxzAMrr/+et7xjnfw1a9+9Xjv3inrd7/7Hd/61rfYtWsXt912G1/+8pfZsWMH+Xye+fPns2TJErZu3Rr+DI5EIti2TX9/P+Pj44DfK/OBBx7Atm1e//rXMzg4yMDAAIODg8dz14QQQghxivmTpvKdCv2mdF0nmUyE/4o7tSqpPvAh/DpqRNF1A03VUVCnhQK2bVEqlTBNKzzR1lQNR3X8qhHF9U9Sg0DAm+yfYzs2qqOiKpPb9YJyLWV61VMQKgQn3oTbneR6nr9gIAqeMmVa4JT7TgZh1f0FFMWd8zGtfeBMAVlY5TRt6lntqbsvmYzT2dnJ/PnzcF232ohVJZ/Pc6D3AKtWraT3QB8tLc24rsMVV1yOaZpYpklrWyujo2P8zy//h2KxMrkzYeAyfS5fOp2mc/58li9fRqFQxPNcOjo6cGyH3/z2t8yf38G8eR2MjY3iuh4rzlhBLpdj6bKl1ZXaNHp7e3Ech4HBQX+63xEUmsViMVKpFOsuuohoNEpzc5qHHn6YZDJJKpWipaWFeCxGuVJBUzX27dsX9nKZdeppXbDqVYOa+r5TU8MhOILquukzJWflP81kn6mgam5yDPXvM8e2ccMeUtX1/2qm9IUr6tWMnvCzOvmpdD23GgRP3tf1YHRsjE2bNnHJiy+mKV3Ts0NR0CMGTc3NNDc1MzE+TjFSpFLxqx1j8ThGtWqyVDbRNBvbdqqrO/r7ZxgRkokk5UoFx54Mp4Q4mTiOQ0tLC5/97GeJxWI88sgjlEqlsB9jLpejv7+f9evXc/nll7Njxw5uvvlmPvOZz7B582Z6enrCoGNgYADXdS
mVSjz44INce+21PPXUU8d7F09Z/f397Nu3j3vvvZc9e/YwNDTEr371K/r7+/n1r3/NokWLyOVyDA4OMj4+zr59+7j77rv58Ic/TEdHB1u2bCGXy/krnNo22WyWgYEB1q9fz0tf+lLuv//+472LQgghhDhFPK/m57VO9mAqHo/T0TGPdLoJVVXr+tfAlJN6z79E9Agd8+bT0tziB1qKEva1cT1/ZbCxsVF6e/eTmchUG6IH05L8KXue66J4QXNm/zlUVUXXdaLRKIZhVPtOVe8RhmLUndTPpraKxQsGHzy25s/ZgqnJfKtac1Lzss+1QsYf7/TnmVYrVVsN5tXfrmka8Xgcy7JQFL/nkK7reJ6HZVkYhoFpmnieRywW8yvPbIeW1hYcxyGbzfrNXGfax5oDFEx51HUdRVWI6BEs28KIGHR1dXLgQB/lSplEIonr+q9jpWKi4K/gqGpaTdN6hWjUYCKTCZ9l5o9JTUVa9ftIJOKvAmfb1dUhPSqVSviISEQP3yugYFlW3fS1ae+JmvfP9HBqltcgGNUcg6kjmR4aNP+v/T44NkHwVDt41/EXIPBc/7OlKJOB+NRpe7OGoNTHcMHqmSoKmqawbv06PnPr33Lh2gtIppIouo4CuJZFJpPhqae28uTmJxkZHqJQLBA1DJqbmzGiUQqFEqMj42TzBUqlMrbjYNl+I/R8scjIyAiDQ0NUymWcmunB4QdqrtmfND8XL6AjaX6uKApNTf7fj5ZlkcvlUBSF1tZWSqVSGDS1tLRQKBSwbZvm5mbGxsbQNI3m5mYmJiawLCtcOGDNmjV87GMf46c//Sk///nPj/Lent5SqRSVSgXLsmhoaKBcLmNZFpqm0draSj6fp1gs0tDQgGVZfOxjHyOVSjF//nxuvfVWRkdHw0U2DMOoe63L5fJx3rvTk/xcF0IIcSo6rZuf+yuqxTCMaHgyN1O/pkn+CX0sHiMei/vTfoLKIyYDHcdxKJX86gq32l8K/GmDarUPjROekNf32HEcB8uy/PsrKq7n1gU6M52gTjblrt5vhhDkUAuqTb97bYg0/fEKysyPmRxNTdPvyWBqeu+f2vCstmqmPvAMVgkKe2+pajiG2ttqVxPK5rIzbqt+Fycrg4JxmqY5bSyjY6PhY4KQa7LaS5nheAcB0OTrMmuwMKVIzDRNCoXaZur1FUGlUrA9ah7oTRbezbCv9b2zJu8782D87R7JKo71jdUPHVTPpUdabRAbpMG1n5VDbedIe6g5jsuePbvZ+tRWVq1a6feNCqbz6TpGNEp7Rwfppiby+SyWZeK4bhiKRiIR9IiOpij+wgUu4WfciPhN0GNRA8sy/V5g7skd5IvTj+d54bSu2uuGh4frrhsdHQ2/Dm5zHIeDBw+G1+fzeQB27NjBJz/5SUZGRo7WsEVVcMwBcrlc+LXjOAwNDU277f/9v/9HY2Mj2WyWUvAXTpVd7cNX+1oLIYQQQrwQTutgyu8v5a+wFVTjBFUYtdMUa79WNY1EIkksHkPXddSgpxPBObSHZZmUSiVsa3LVvaAiR9N0gqbWru3iF2ZNnmx7noddXe4+mJoXnoRPDVOU2jqQyRPe2vuHz68G9525Mmk2nuvVBSe1qw7OcG//PkFVVnV8Sm0wVQ1iDrv624xZkjet4iscy0xBBdPDsHA/6sK8QxzDw45RmQyFwtdxcrN+cczcQ56pezBLgjTjsIP31aGHPvuNtdVMz3fM9dN7ZwqPphcJzTyicKfCQK32c3KkpuR/4dYUYHwswx//+ARXXH45DakkkWgUqitt6rpOOp2mvb2d8bERSqVidYquSSwWQ68unKAbOpqlYTsOmgqaqqJrOvFYlEQiQalcxrbN6mdhphG9MI78PSbEsWeaZl0oIk4cpVJpWiAlhBBCCHG0nbbBlKIoGIaBYUSr06Kmn/BObfLuuR664Te4NgwDXdOq581uWFLkei6VSplyuYzjOnXbmjxp1FFVBTR/akRtf+xgFa+gaqpuPFMqo/xBMaVaanooVX/3w5zczzATTJn69QznvrMGQLMEHJP7Uj9NcKYpZTOO7zDpRm3odugqOGbMCGban/owbOrthwnamC3M+1NMD9QO6ZA5lxdOEJ2882ToeqhN+kGjWvd58bzpPcmUQ781p271EN8fuXAa3wxTGE3LYtu2Z9i7Zy/dCxb4+1xN0HRdI5lM0d7RTn//ATKZDJ5X8hueWxbRmEYsGiVmRDErJpal4CrVabmaRtTwg6lcLo9p1n/WhTidBAtSvND3FX+6ufQLDX6HkddFCCGEEEfDaRtMBdVSsVis2stpcin7WSkQjUaJRWNomo6iaICH4oHi/w/HdigW/Wl8U6cWhdPQFPBcBVXR0TUdy7bCqXzgB2BBtU9dwDLzvDsmM4AZpjrNUnkUjKfu+kNmVTX/r63EUpRDVl/VVp/VTfmaYV80TQtDwiNzbKZHBRV1tfxm7FOr16qjCnprHaPxzcSpNg6vjmhOj5l8meYaeCmgeNVwCr8KcNpDa1elO1xVz+FPkOYqDEZnuD6sdHQ9BgYGeXrbNi644AIa0g0oRAAFVdWIx2K0NLfQ0JAmGh3FMPxm5uVKhUjEqDZBN9BLOqqmongequZP59Or0/misSjFUjlcRMCbaVBCnIBUVeWaa65hxYoV7Nu3j5/97Gecf/75PPnkk2Hvobl43etex+OPP05fX99h7/umN72J++67j+HhYRRF4cYbb6SlpQXTNHn44YfZuHHjn7JLp4VkMslb3/pWduzYwQMPPADA0qVLee1rX0ulUuHHP/4xY2NjvOxlL2Pt2rVs3bqVe+65h87OTq699lpc1+UnP/lJuLjGGWecQU9PD7/5zW+O524JIYQQ4hR12gZT/gljvL6/1CFOiF3X9U9S43F/9TVVR1FVPMdfiUvx/Mofy7YoFIJgqmY6W7ChIMRQFf8E1lBRNTUMpvw+Uy6e5/ebCloMTW3WXRdXeVNvq02Opu/LbM3LD3W+f8h/TFVm3+ZcJRIJvva1r7Js2bI/aTtH04MPPsinP/03YTjV2trKdde9udoTaub9P57rsCkKFIslfvTjH9dcdzTGo0z/esZiriCaOvbH5JAT5xSFfL7Axo0buerKK2lpbUYzoiiqgqco6JEIjek0La0tDA4OUqlUKORzVCoV4rEYhuEHT4aho1VUbMdBVRU0TUPTdIyIQTweJ5fzG0Pj+atk1k75FOJENW/ePC688EK++MUvMn/+fBoaGvjQhz7EPffcw913381ll11GQ0MDv/vd79B1nTVr1rBgwQIefvhhdu/ezSWXXEJ3dzfLly9n69atrFmzhvPOO49nn32WzZs3s379ehKJBBs3buT888+ntbWV1atX8/DDDwP+36HLli3j3/7t31BVlQ9/+MM8++yzrF+/no6ODh588EEKhQIXXngh8+fPZ/PmzTz11FO86EUvYunSpRw4cIDt27djWRZXXnklY2NjPPDAA2G/pFOVbdsMDg6yZMkSHnjgARRF4frrr+d73/sebW1tvP3tb+e+++7j7LPP5mtf+xo33HADa9eu5
fLLL+fuu+9GURTe9a538fnPfx7P80ilUrS1tdHW1saSJUvYsWMHL3/5y8nlcmSzWR555JHnNc1aCCGEEAJO02BKUZRwik1Q8eI6bnhb7S9XtYFR1IgRi8WJRCLTKnuU6hL1pmlSLpWqv/ROVpH42/LPQcOKj2rAoWla2FNqslzev4+DX36ieMpklZECQToVrK8W1jMdpfP+o927JplMcvXVVzFv3ryj+jx/ikhER9O0MJhqaWlh69aneOihh6htGB7wa6WO77SHG991w5ymabzwlFnSzBPhxKW2cg8Uz8M0Kzz99NM8s307S5YuJpqIoyj+Co0eColEnPa2dlINDRQLBYqKimmamJaJEY0RjRp+M3Rdx3ZccDwUxUXTVKJRg1QiQb66uqTjOPWH4ei0mxLiBTE2NsbExAQf/OAH2bBhA3v37mVgYIAtW7ZgWRbZbJZUKsX/+l//iw0bNnDJJZfw85//nBtvvJH/+q//Yv369dx1111cffXVgF/FOTAwwNve9jYGBgb4wAc+wA9/+EPOPfdcVq1axW9/+1te/epX140hHo9z/vnnk0gkmJiYwLZtisUihUKB9773vfzwhz/k1a9+Nf/+7//Ou971Lr7zne9wxRVXcMcdd/D+97+fSqXCS17yEjZs2MDZZ59NpVIJg69TVaVSoa+vj9bW1vC6fD7PueeeS0NDA8uXL2d4eJjHHnssDOvOPfdcMpkMZ599Nq7rsnz58rq/P1paWnjf+97HD37wA2644QY2b95MqVTiXe96Fxs2bDjlwz4hhBBCHD3PZ97USal2VThVU/0eUXok7JnguE4YOEyf4uZPQ4vFY8TjcTRdrzYTr+dUf1kul8vTGklP9lqqndI3pe+SqqAqKooSrN6no+vV6W1Bw3ClprF4EHhNaR506BBpMvwKV6Pzr62GaGr97cr0wOXoOPLnCFaL2rRpE4VCAcuy6O/vJ5/PY1V7AOVyOQ4cOMDw8DCVSgXbtrFt2w8HXhD1IWbtZaYQJh6Ps3btWtLpNIYRQdd1oobhr+JmGACceeaZLF26lEgkwryODn9KmK6j60eeIx+rGGiGNufH6Jnnbuor4lGdbgm4rsfwyChPbd3KxEQGx7L9z1X1MxeJGLS0ttLW1lr92aH70/nKFRzbRq+u4Of3nlPxW8ipaKpGRI8Qj8dJJRNEIjWvYfgemT5tV4gTheM4fPOb3+QrX/kKL3rRi+jp6SGbzdLX18fq1as5//zzKZVKtLS0oCgKW7Zs4emnn8Z1XebPn8+zzz7Lzp072blzJ7FYjDe84Q3hCrWJRILe3l5++ctf0traylNPPcVzzz3Hnj176sbgeR6O43DZZZdx11130d3dzWWXXYZpmqTTaRRFYdu2bTzzzDMUCoVwVbmhoSHGx8dRVZWenh66u7vJZrNTVj499QV/h//Hf/wHIyMjjI2NsWvXLkZGRliwYAEACxYsYGhoiO9///v09vYyMTHBrl276v5R44orrqCtrY3R0VGSySQDAwMMDg7O2BNTCCGEEOJInJYVUxE9QiwWR9f1MEQI+ztNqfIIbotEIqRSKRLxOLqmT2veHVRLFQp5LNsKr69V3+Tbn8ozrZFoNXvyPFBVBc9TURSvJtiqqY8KelEpargy3bT+UTNWY9TMs6qbhneY+Xon4Inzhg0bSCaT/Od//icNDQ2MjIwQiUSIxWK0tbWxcuVKfvzjH9Pe3k4sFsNxHMbGxnjta1/LihUrXsCRzBDNzHDsW1tbmdfRwfnnnUcqlWJiYpxUddy6pvG7+37P6jPPBAVe+tKXMDY6hmEYNDQ0UDEr3HHHD07M5rMn3lvjEKa8MIr/WSqXTZ56ehsHDx5k3rx2kpoKmg4KaLpOY2MjHR0d9B3oI5LPo5TLlCsVKmaFeDxJNGoQjRpUTBPbdlAUD01V0HQNw4iSTCaI5aKYplld/dNFQcULKyAVPMXjT50WK8QLqbm5mZtvvplMJoOiKAwPDzM6Ospb3vIWNm/eTDKZpLW1NVytslQq4XkeuVyOJ554gltuuYXu7m56enqwbRtFUWhvbwf80GtkZATXdXnkkUf4wAc+wOrVq5k3b17d38kHDx7kgQce4JlnnuFtb3sbd911V/gzvlwuY9t2GDblcjm2bdvGsmXLuOmmm2hubqZYLPKLX/yCFStWYFlW2DfpVGdZFqVSiZe85CWUSiWampo4++yzSafTfOtb32J4eJgPf/jDfPzjH8cwDP71X/+V8847j4svvphUKsUPfvCD8Hciy7L44Q9/yN69e7n22mu58847ef3rX0+hUKBcLp+Yfy8JIYQQ4qSheHOY45PNZkmn0+hGbMZqohNdbaNtVVNpSDXQ3t5OKtWIoig4jjNzCbrnr7Jn2y7JZJLu7m5aW9vRNb8xst8Pyqn+a67N8MhB9u3bw8TEeN0vaVNX9/O5MKWZdl3Tcw88z50Mzaq9o2qbm9cGamFvqbDneXC/+udV6pbVq+lzc5hk4bCr5c3BoSqv2tvbefLJTUc0lc/zPPr7+3nuuefo6urCMAx27txJOp0mEokwf/584vE4IyMjZDIZ8vk8LS0tZDIZzqsGQ0di48aNXHLJZeG/Dq9YsYJ58zp46KGZp4SER6tmtxsbG1m37iL27dtH5/xOhoaGcD0Pw4jQ0tzCxiee4Oyzz8asVMhms7ie64dS5QqxWIzNTz55RGN+17tu4Lvfvc1/D3geygucIFVjUFCU6oIACo5jh+/Z+nseeoW/YyFoUV/7XlSr0w41TWHxoh4+9rGP8upXvZLWtjb0aNSPZD0Pq1Jm7969PP744xzoPcD4xASVSonGxgaamlpwbIeJiQwTExmKpTKmZWM7DpbtUCqXyeVyDA4MMjo2TsW0cD0XRVEJP60zLHk526qWs+5fzc+aYj5LJpOhsbHxTzxq4lQT/J0+V9FolHQ6TTabpVwuYxhGNVifoKmpCdu2wypVVfWnuSYSCYrFIvF4nGQySaFQoFKpEI1GSaVSYaARjUYplUqA32swHo+HC4gEf78F2wJ/2nepVArf16ZpUqlU0HXd7/sWj6NpGtdffz22bdPe3s6XvvQlCoVCONZcLvcCH9ETU1Bte8EFFzA4OMj+/ftpbm6mXC6Tz+cBMAyD5uZmxsfHMU0TVVVpbm7GNM264+T3zNOwLItEIsGaNWtYu3Yt8Xic5557jrvvvvt47eZpR36uCyGEOBWddhVTEd1fJcswjMnV+GYJ1zw8XNdf+j0ejxOLxasn336FUrjWmedh2zalUilcjW8mYZP1akiAN6WnlRJMqQtWulPxPDfsZ+W6NdN+PA/HcSebWQeVT3V90Kv3rckIak9vvbCqy8Pzjk5F1FxPqJ9PwKkoCl1dXXR1dYXHcfHixdOeu6mp6Xk/xwstm83y29/eC8COHTtnvM9jjz12LIf0wpohXDmRzNbOSVEUHNdleGSUzZs28+IXv4h0UxN61PAfoWnoRpTGxjStra2MjIwSLZWoVPwTaMe20SMGRtQgYhhopoXquKieh6a6RHSdqBElHo+j6zkqFXP6OPyGV7WjOkpH
QYgjU6lUGBoaCr83TZOxsTGA8M+pgiCpVCqFwdNs39c+JnjcTNsCwsqoiYmJuvsE07OD7d12220kk0kymUy4euD4+Phh9vTU4jgOjuPw2GOPhX//jYyM1N3HNE0OHjwYfu+6LqOjo7NuC/zX4I9//CM7d+7EdV0ymcxR3AshhBBCnA5Oi2BqctqdPyXHX1mvZjU+d/r0vdqvI5EI8XgCIxKt9oBSJtvD4P8iVy6XKRaL2JYV3hD0ppo1EKlmQeHS9TUhjletLlFQJ6fnaUGo5VdyBVVMdduvS54UwA1XAKx9zskv/VAqmCE4W58b/3blqPXBcRybbDZLc3PzUdn+C2FiIlN3rE3TZMmSJXUnVrWOdwymqApGJHLsArkXYHXG4yGYAKvgn9Q+vW0bB3r7mD+/k1giDkE/OE0jlUrR3t5BX18fhUIeXTewbRfLsokYUaLRKLFolEqlgmU7KK4brtAXiURIppLEM3GKxVK4GMIL+erUVlQKcTqbLeQ6HR2Nnweu684aSgohhBBCHKnTIpgKBKvxxWKxcNrRoZoOe66/ql4sFieZSBIxoqiqjn866RBMDHIch2KpSLFYwLLtmsbXHtUiJz/YqTtnr3/OySqpyR5RwS+TiqKExRRB03PFU6r3UcOx1k7lgyC8UqpT/mqStKmrBELNqfn0U+VwGt9RbDM1MZHhmmteS2Pj3KeXHGtDQ0N1Uz57e3vZsGEDsVh81sd4xzWn8fjpz+46lk93UjbwrpnoiuM47Nm7l6eefppVZ64ilUqhRbVqIZhCNBaltaWFpnQTY2Nj6HqESqVCuVwmFo9jGBEMI0JE19E0C9vxwzpN8XtNxeMJGtONFIpFvxfPSXi8hBBCCCGEEOKF9CcHU4eaqjU1KDmeFAV0XSce96fxKQrVBsSzV0u5noum+dVSsXgSTY+Aqlanxin+il4K2I5DqVikXCrjOi4ebjX8qd3/KdVQM4U81YqT2cZU16tKUdAULTzGnupNq/RSvMmpg6ri4XqTzWT9TfhhlId/32oznep19ccuCKfqqrpewH+F9TyPnTt3vWDbO1pq99hxXZ59bkf4/dRPwkw9pk5lU6v+TiZBMOx6HuMT42ze/CSXX345LS3NxCORatWUim4YNDU30dbWRv/AANFoAbNSoVwpY1km0WjUn84X0dE1BUtRcBUFRVXQVQ0jEiGZSJJIJKhUythBj7hwlU2lfkxCCCGEEEIIcYpTj+bGT6xpPQoRI1LtE6WFgc3US8DzPBRUIhGDeCJJJBpDrfaXQlFAVcKTSddzqVQqmJZVF0KEDY2nrc5XbWjM4U8+a+u5po2x+r/Je9Sf1KqKGi7LrWr+0vVKdSritEt1fxS1+v2UoSu12/de2FBKiBNB8FG0TIft27dzoLeXSrmC5zo1d1KJxxO0trXS2NhILBpD13XMikmlXAEPDCNCNAindBVVVVAVBU1TiWga0ahBPB4nEolMbXUefsQklBJCCCGEEEKcLo5qMHUiCabxRaM1vaWmBVKTFUmu64KiEIvFSCZTfn8pVau7X7Bal2lWKJVKOI5de3Nd8DM9o6sPkQ51Ijq5ht7kJWyCXjsRSZmyranjCEInRQ3vWz9OptxvMqSidnxyzkxdcifHY45O/AOlKCqO49HXP8DWp54mXyjgujWfM1VHNwxamltobW2tLooQAwUqZgXLtlAUBSNqYEQiaKoSZtiqqlZXyYoQi0YxjCiqplUD/NrP8eTn93A/G4QQQgghhBDiZHdaBFOK4jcfjsViRCIRoKb6yKs5EfQgnJ+H6p9AxvwTTz2iVwOm+obptm2Tz+cplYs1vaXmPLBZxxt+zeFOTA990jo1fFLV4OJPTVJVNWzoHlwXhlN1AVWwLTlJFnN0Ar9VZv2Uev6U10w2y+Ynn2R4eATLsgkSSAUFRVNJNqRon9dBIpXEiEbR9QiO7WCZVrhgQsQw0DUNVa1WICqgahq6rhKLx0gmk+HPoxNlyrMQQgghhBBCHGunRTDleR66rhONRlG16i7XFEnV903y+8yA4k/jiyeIRAw0VUNVgobjk9s1zQr5fA7TNOvCrrmqrYiYGkBNrbSqj4VqA6vpCUBduDUtnAqm+PnT/ZTq935QNTkFMGy0rk6GU/V9sw5vpmmS4tQ0vfqw9n154qRUs70TPQ+qHZ+wLJtnn9vBzp27KOYL4LjVR7ooqkosHqelpYWGxjSRaJSIEcFxXSzbAhSManVmxDCI6BE0VfWn86kKuq4Ti0VJJGIYkQiqWvNjWD4mQgghhBBCiNPMaRFMqapKNOqfKKpKdRU76gOTqU29FUUhFo0Rj8fR9Joe8eH9/ebppVLJX43PMsPbjoRX06/pUOHNbFN6wql2M8wp82+rD70mK6Ame08FQZUWBFQ1lVNBQFV3fI54L09xJ8MBOdpj9Oq/CN+NNW/LEyea8k2rb6yZmul5LoODg2zZ+hRj4+NYpjk5nU9R0A2DxsZGWlqaSSTiGNEogF8x5bpEdJ14LEY0GkXTNVRNCz9nEV3HiPhBeby2GnPqSp0n3BETQgghhBBCiBfenxxMzdZE/ESokgkCFV3XSCQTRKMxFEWthkEzBEFBhZBSrWqIx4lGY2iaGgYzbjhdT8F2bAqFQs1qfNWTySPpCaP4p7t+v6hqTUbN13Pc0ZqKpinNj2bsJeVP55v2tarUVFIp1UqOmQKx5/e6Hu/3wwuqvuHXFNVpX9Wpn3X3PdTjZrvfC3GZ627N8Pmd6eupr6UCKNWSI8WrfQcG/00el2PRnGvGBv/K5Gsy8yHyx+R/HiGfy/PEE0/QPzhIpVIB16+aUlBQNY1kMklbWxvJZNIPoDQN07KwbBsPMKIG0aiBruto04Ipg6hh+M3TNQ1FPfSP4hP1Z6wQQgghhBBC/KlO+YopRVGIRmMk4kk0Taf2hHjGk+vqf4ZhkIjH0fVI2PTcCxIt/Ior06xQKhaxrNpqitqT8dn/Q/G3MTlBiGkB1RFmCv7oFeoudSHJ5L3CYxMci7r4IOgxVd1e0Azec49sGt9MDhdkHo+T77rQ4nA9u2ouNQknzDROLwhplLqw5rDjOYqXuT37bCOafr/aW7wg0am5JXxuRZ1yCW5R8TwlDIpfqNf9SB4bRs0KqMG4FBXHddmxcyc7duygkC/gOi5B2IuiEI3FaGlpIZ1OE4/5q+w5jo1lmjiO44fb0Zg/lS8IpjQVXdeJRCJEDT+40jTdnyZcUyMVzJr1/zj0vkhQJYQQQgghhDiZ6Ye/y6Sw79AUJ9LJULDiXvC1pvnVUvF4DE3Twml6wTS12vt6ngKuR8SIkEqlSDakMKKGf1JZ01tKURQ816NSKVMul3FdFy2oeKg9o5yRh+v5VRee56F4/olnOG5FmR4N1PStUrzDnabO8qyeO8N14Zlv3cmvHxK4eIoHbjXEcz1c1fXDlmM8xSgI8o7a1uve00d2cj/n+840HXKGfVKmzS2re7ZZrnt+gs9C7bamFxEe7sAHYWbN14d/4vD9Vj+Ntf49+qesRnekj1UUJfyMq4panfLrMTE+wVNbn+KSF7+YltZ
W1EgkfPH0SKQ6na+FsbExSsUSpVKFcsUibjkYkSixWIJYvEzFtLAdN8y1guC7lCwTj0UxLRPFdsKADBR/DYaqub7PwseeQD+TxcnNMPyqvyPlui7lcvkojEjMhaZpRKtTjI+E53mUy2X5GSKEEEKIY+qIftv8z+99l0QicbTGclSoilpdISuCogSNz6unxVNDnmr+oyoKEcPAMPym50p1StvkibtfQWSaFcrlMrZlTf4Sd9hgqv7maTHTnH4XPPJfGOf+O2Z9QBXsc7Di4PH4XfVPyCfm+gxTvj+SYGqOzzDlKWZ7mDLDV4cZwRzvN8Mjw6C1/vu68Rx2GM/3xQkCqenXTR/DzBWOhxzV83jThJVfBOGrH9y1tbXSOq8TywWnbNU9xog3cOZZ57No6RlUKiaWaQH+YguaruN5HrZlYVk2jutOBnGuh+M62LZNxTSxLPuQ+zeXPa/d42KxwJve9KYj2HshZrZgwQLa2tqO+HGFQoFnnnlGAo7jpKmpiaVLlx7x4xzH4amnnsI0zaMwKiGEEEKImSneHH5rzGazpNNp+vv7aWxsPBbjEkII8Txls1m6urrIZDLyM1tME/ydLoQ4+cjPdSGEEKeiU77HlBBCCCGEEEIIIYQ4MUkwJYQQQgghhBBCCCGOCwmmhBBCCCGEEEIIIcRxIcGUEEIIIYQQQgghhDguJJgSQgghhBBCCCGEEMeFBFNCCCGEOKR4PE48Hn9ej+3q6jrix2qaBkBzc7OsQPY8NTQ0hMdR0zQaGhqO6PGqqtLW1kZLSwuKorwgY4pEIiSTyfD7VCoVjjEQi8Xo7u5+QZ5PCCGEECcHCaaEEEIIMStFUfjMZz7DrbfeGgYUiqIQi8VQVf/XCE3TiMVi4WMMw0DXdQA+8pGPsHLlSgCi0WgYROi6jqZpaJqGrutEo1EAFixYwKc+9SkMw+Ciiy5i9erV4f0NwwifX9f1ujGISZqm8eUvf5l3vvOdKIrC9ddfz1e/+tVpxzo4/uCHRoFoNMqnP/1pvvSlL/H1r3+dt771rUD96xq8frquh++H2u1pmhY+T2DRokX84z/+I5FIhEQiwb/8y7/Q0tJSN6YlS5bwiU98AlVV68amKAqRSIRoNIqqqkQikXAsgLwXhBBCiJOYfvi7CCGEEOJ01dPTQ1dXF7qu093dzcDAAB/4wAc4++yzee655/jud7/LJz7xCZqamvjZz35GJpPhrW99K7Zt84UvfAFd11FVlVe96lVcc801lEol/u///b984AMfoLm5md/85jdceumldHV1cdddd5FIJLj++uvp7++nUCjgui5r1qzhgx/8ILqu88Mf/pDh4WH+8i//ElVVeeqpp/jnf/5nPM873ofqhNLQ0MD1119PX18f119/PcVikUQiwcc+9jEWLlzIL3/5S7Zs2cIb3vAG/vCHP7Bo0SK+9a1vAXDZZZfR1dXFu9/9bjRNIx6Ps3r1am655RZisRj/8A//wCte8QqWL1/OI488Qnd3N6tWraJcLrNlyxZ++9vf8vGPf5x4PM6vf/1rfvSjH+F5Hvv27SOVSrFy5Uqam5vJ5XK0tLTwd3/3dxiGwc9+9jP27NlDJBLh0ksvpaenh7vuuou//uu/5rbbbuOv//qvcV2XiYkJdF0nEonw8Y9/nDe+8Y1cdNFFHDx4kH/8x3+kXC4f56MvhBBCiCMh/7QkhBBCiFldd911PPfcc+zcuZM3velNrFixgnPOOYePfOQj3HbbbVxzzTXs3LmTD3/4w2zYsIFbbrmFp556Ctd1ueaaawC/Aufmm2/miSeeIJ1Oc8UVV3DGGWfw1FNPsWHDBnbt2sXOnTt561vfyuOPP84f/vAHbr/9djo6Omhra+Pmm2/m29/+Np/73Od473vfS3t7O+VymU9/+tNceumlddVawlcoFPjGN77B5z//eb75zW9SKBRQFIXdu3ezY8cO3vrWt7J3714Mw+CTn/wkd999d/jYxYsXs23bNkqlEvl8nuHhYSqVCs888wye53H11VezePFihoeH2bhxI5deeimf+MQn2LJlC11dXdx444386le/4q//+q9585vfTFNTEwCWZfHTn/6UN77xjVx77bX86Ec/4r3vfS933nknt956KzfccAOpVAqApqYm5s+fj67rLFu2jGQyiaZp3HrrrZxzzjl84QtfIJlMct555/GOd7yDhx9+mHPPPTessBNCCCHEyUOCKSGEEELMqLm5mcsuuwxd1/E8j6uuuirsN9XW1kZnZyelUon29nba2tpobW2lWCziOA4bNmzgwQcfBMDzPEqlEqZpct9997Fx40ZKpRIPP/ww69evZ82aNTz++OPouo5t28Tj8bAnUvDYzs5OOjo6qFQquK7L6OgoExMTWJb1gvVAOtU8/PDDvOc97+GRRx4BYP369Vx44YVs2LABVVWJRqO0tLSQz+eZN29e+LgNGzZw1VVXce6557J+/Xquuuoqbr75ZoaHh9m5cyexWAzbtnnooYcYHh7GcRze9KY3cckllwB+KNbZ2UlbWxuu6+I4Trjt++67j4svvpiuri42bdpEsViks7OT9vZ2LMsK71ssFlmyZAmrVq0K+1KNj4+Hl4mJCXK5HJ7nYZompVKJn/70p+zdu/cYHV0hhBBCvFAkmBJCCCHEjDo7O7ntttv47Gc/y2c+8xm+//3vUyqVuPPOO/nIRz7Cueeey3//93+TyWT48Ic/TEtLC//3//5fFi5cyLJlyxgZGeHxxx9nYGCA//N//g9nnnkm3d3djI2N8eijjzIxMRGGE6tWreL+++/n4MGDbNu2jSuvvJLt27ezZ88evvjFL3L++efzpje9ic9//vP09vayadMmbNvm97//PbZtH+9DdULxPI8HH3yQfD7P5s2byefzPPDAA2zZsoWxsTHOPvts7r//fpYuXcoDDzzApz71Kc4777ww4Hvqqaf4//6//4/3ve99vOUtb2FkZISf/exnnH322ZimybZt23jiiScYHBykUCjw1a9+FYBMJkNvby//8R//QVtbG+9+97v5l3/5F7LZbDi2TCbD9773Pb7zne9QqVT48pe/zPLly7n++uv5whe+wMDAAI899hiPP/44ExMTXHnlldx7772Mjo6yYcOG8DW3LIvHHnuMPXv28E//9E+sX7+eVCpFPp8/LsdcCCGEEM+f4s2hKUM2myWdTtPf3y+r4wghxAkum83S1dVFJpORn9limuDv9D+Voih1fZ2mfi9OfnN5TVVV5a1vfSvnnHMOIyMjfPOb3ySTyRyjEZ5+5Oe6EEKIU5E0PxdHled5eJ6HoiiMjY2zv7eXjvZ2mprSuK6LZdmUSiWampro6+8nEY9jRA1SyaQ/dQSwTItcLkdrawuu6wH+NmOxmEzfEEKI42RqYCGh1KlnLq+p67rcfvvt3HHHHfIeEEIIIcTzIsGUOOp+97vfceWVVzI0PMz27c8yPj5BpVLBtCwMw6BULHLZpS/m4MEhKpUKlUoFwzCIGBFKxRILFnSxf38vqVSSXC5fvb7MNa95Zd3y1kIIIYQ4PiSUEkIIIcTzJc
GUOOoGBgYA6F7QRWtLC7Zj09t7AM/z6OlZSCaTpaGhgcWLelBUFcuyaG5q4uDBg37DUwWa0mnKlQqxWIxUMkk2l0PX5e0rhBBHm6qqNDc3UywWKZVK4fWRSIR0Ok0mk8GyrMNuxzAM0uk04+Pj03pCJZNJKpVK2Pg8mUwyNjaG67oA4dTD2iliuq7T0tLC0NDQnPfjyiuv5KmnnmJ0dHROYz6ZqapKU1MTxWKRcrkcXpdMJsnlcqRSKSKRCJ7nkclkUFWVdDpdd3+Ajo4OxsbGaGxsZHx8HEVRwtdFCCGEEOKFIGf24phpaGigoaEBz/PonD8f8PtXdHV2ArBwYXfd/ZuaZu+B0tLSfPQGKoQQAvCDjHe96120t7djmiZf//rXKRaLxONxPvShD1EoFEilUnzpS1+iUCgA/s91Xdfrgp/GxkY++MEPhoHUv/7rv4Yh18qVK/noRz/Kl7/8ZSYmJnjve99LuVzm4MGDfPvb3+bSSy/lyiuvRFVVfve73/H73/8egKamJq6//nr+5V/+BVVVw2njuq5jmiaRSATbtsNKnlWrVpFOp3nDG97APffcQ19f3ykbTqmqyo033khTUxPNzc380z/9E5lMhmuvvZYrr7ySv/qrv+JjH/sYuVyObDbLD37wA97xjneg6zqtra185StfYXh4GIB3vvOd3HbbbVxwwQU8+OCDvOMd7+C73/0upmmiaRoAjuOEU+uDvlRT3wNCCCGEELORYEocc9IXSgghTg4LFixg0aJF/PznP2d0dDQMkxYsWIBt23z5y1/mL/7iLzjnnHN49NFHURSF66+/no6ODh566CE2bNgAwOWXX86TTz7JL37xC975zneybt067r//fgB27NjBQw89RCQS4dprr+WHP/wh27dv51Of+hTLly/nVa96Ff/wD/+Aoij8zd/8DY888gimaYYh1NKlS7nkkkvYsmULr3vd64hEIoyMjJBKpchkMnzta1/D8zxe8pKXcOedd/Kud72Lt7/97aRSKW677Ta2b99+3I7v0eJ5Hr/85S8ZHBzkr/7qr+jo6CCTyXD33XdzwQUXoKoqhmHw8MMPs3fvXkzTpLOzk//4j//g9a9/PW1tbWEwpeu6/49IXV309PRw7bXXUi6Xuf/++3nzm9+Mqqr89Kc/ZfHixZx77rk888wzNDU10dnZySOPPMKjjz56nI+GEEIIIU506vEegBBCCCFOTG1tbSxdupT29nZuuukmFi5cCEB/fz+GYXDLLbdw1llnYRgG4P/Dw8KFC9m/fz+7d+8OtzNv3jx6e3vxPI99+/Yxb9483vKWt/DpT3+a9vb2cGpYc3Mzg4ODOI7D0NAQHR0duK5LoVAgn8/jOE74XOAHZH/5l3/Jr3/9axKJBIODg3z5y19m6dKlfPGLX6Sjo4PGxkZWrlzJyMgIY2NjeJ7H3XffzY9+9CMuu+yyY3g0jx3P8xgYGODiiy/Gtm327t0L+JVNnufhui6PPvooHR0dfPSjH6W9vZ1CocD111/P0qVLyefzddtTFIVFixbR19fHxo0b+eEPf8jrX/96hoaG2LNnD69//evp7Owkl8tx7733snDhQg4cOMCuXbuOw94LIYQQ4mQjwZQQQgghZjQ6OsrOnTt56KGHGBgYoLm5mfnz52NZFv/1X//Ff//3f9PX18fOnTsBPxD5wQ9+QKFQ4J3vfGe4nW3btnHppZfS1NTEunXrePrpp/nFL37BV7/6VUZGRsL77dy5kxe/+MW0t7ezePFiduzYgW3brFy5kjPOOINKpVLX5yqdTrNnzx7OOussAMbGxiiVSoyOjlKpVCiXy0QiEV760pfyu9/9DgDbtslkMuRyuXAq2qlGVVVe97rX8eIXv5jbb7/dn0Lf2RlWLEciEfr6+vj973/P8PAw3d3dNDY28qUvfYmHH36Yc845Z8btBlP0YrEY5XIZ27bp7e3lV7/6FbZts2nTJkzT5M477ySTyXDDDTccy90WQgghxElKgikhhBBCzKi3t5dHH32U97///QwPD7N9+3YuvfRSYrEYF110EW9729v4/e9/T19fH+AHIueeey5nnHEGDz30ULidDRs2cPDgQW655Ra2bt3KM888Qz6fZ3x8HMdx2L17N6Ojo/zkJz9h/vz5vPe97+WnP/0pBw8e5Jvf/Cavec1rePWrX82///u/4zgOAOVymTvvvJNvfOMbtLe3k81m2bdvH6Zp8uSTT+J5Hlu3bmXRokVhtRTA9u3byefz5HI5nn322WN/UI+BZDLJ+eefj6ZpvOMd76C7u5v3vOc9qKrKpk2bsG2b5cuXc8stt7Bnzx42btzIQw89xHvf+17S6TQPPPBAuK2nnnqKUqnE5s2bqVQqPPHEE7zmNa/hJz/5CalUirVr15LP59mzZw/Dw8Ooqso555zDypUr694DQgghhBCzUbw5rO+bzWZJp9P09/fT2Nh4LMYlThGe5/Gf//mfvOMd70BVJQcV4ljIZrN0dXWRyWTkZ7aYJvg7/YUQNLp+oe9/pNs9lNWrV9PX11e3ot/pJhqNsmzZMrZt21Z3/Qt5nMWxIT/XhRBCnIqk+bkQQgghnpcjDTXmev8XMiyZGsacjiqVyozHQUIpIYQQQpwIpIRFCCGEEEIIIYQQQhwXEkwJIYQQQgghhBBCiONCgikhhBBCCCGEEEIIcVxIMCWEEEIIIYQQQgghjgsJpoQQQgghhBBCCCHEcSHBlBBCCCGEEEIIIYQ4LiSYEkIIIYQQQgghhBDHhQRTQgghhBBCCCGEEOK4kGBKCCGEEEIIIYQQQhwXEkwJIYQQQgghhBBCiONCP94DEEIIIcSJqbOzk3Q6fcSPK5fL7N2794UfkJiTdDpNZ2fnET/OdV12796NbdtHYVRCCCGEEDOTYEoIIYQQMyoUCjiOc8SPsyzrKIxGzFWlUmFsbOyIH+d5Hq7rHoURCSGEEELMToIpIYQQQswom82SzWaP9zDEESqXy5TL5eM9DCGEEEKIOZEeU0IIIYQQQgghhBDiuJBgSgghhBBCCCGEEEIcFzKVr0YulyMej6MoCoqi4HkeiqIAft+FUqlEIpGgv3+ARDJBUzqN53mYloWmqui6fzgrlQqWbRM1DCKRCJ7noapquC0hhBBCCCGEEEIIIcEUhUKB3t5eVq1axfZnn2N8fIJyuUwymcTzPFKpFLlcjoaGBkZHR3npSy/jj09sIhKJkIjHUBSFcrlCJBKhYpqkGxtxHBtQwkCqUCiwfPkyli9berx3VwghhBBCCCGEEOKEcdoHU2NjY/zP//wPq1atYt68ebQ0N4Oi0N8/gKqqxGJRooaBpmuk00tQFZVzzlpDa2sr+XyegweH6OpKUSlXcBwHPaITi0UxTQvP9XA9lwULuojHYsd7V4UQQgghhBBCCCFOKKd9MAX+ND2AnoXd4XWLF/WgKCozzb5bunQJAOl0I11dnYfdvkzhE0IIcTKKRqNcccUVRKNRRkdH2bhxI6VS6XgPSxxGMpnk8ssvD9sJ7N+/n02bNs3psYZhcMkll5BIJPjDH/7A8PDw8x7H/PnzO
f/88/nlL3/5vLchhBBCiFOfND+fhaZpqKoS9puqvdSa6fZD3V+c3DzPO+KLEEKcrJqamvjkJz9JR0cH1157LZ/4xCcwDIOzzjqLNWvWoOs67e3tLF26lIsuuohkMomiKCxfvpw1a9awbNkyotEoyWSStWvXMn/+/OO9S6cFTdNob2/ngx/8IGvXrqWxsZGuri4uuugiGhsb0TSNZcuWceaZZ7Jq1SpUdfLXwTe84Q286U1vYsmSJVx++eU0NDSwePFi1q5dS2en/49x8XicCy64gAULFsz4fSKR4MILL+Scc87hVa961bE/AEIIIYQ4qUjFlDht1YZGU8Mk13VxHBfXdQEvvI/jODX3B5gePvmPCSioqoqmqdWgUkXXtep1OqrqB5e1JwWgAEHjfa/6vRBCHHuKojAxMcFvfvMbhoaGeOUrX0lrayvXXHMNZ5xxBvfeey/JZJKrr76a0dFR+vv7uffee/nQhz7EY489xnXXXce73/1ubrrpJg4ePMhZZ53FJz/5SQYGBo73rp3Sstks3/nOd1i7di0/+clPME2Tf/zHf2Tbtm3cfPPNfPazn+U73/kOv/rVr1i1ahV33HEHv/jFLwB/AZeOjg5+//vfc++993L++efzuc99jl/84hdcfPHFfPSjH+Wmm24in89z5pln8rnPfY4bb7yx7vv3v//9mKbJ/PnzMU3zOB8NIYQQQpzoJJgSp5yZAifHcaphkxOGS0HwpCiEtwePna3aKVipMZgeAaCqGopCeN3ExATFQhHbsXEch0qlUr2YOI6N67pheKWqKpFIhGg0RjweJxaLhitDxmIxotEo0WgUwzCqVXx+gDVTJZ5U5wkhjoaenh4+85nPcOaZZ/K//tf/oqenh7a2NrLZLOeddx67d+/m5z//ORs2bOBTn/oU559/Pr/+9a/5/ve/z2WXXUZzczPr16/nnnvuwXEcFi5cKMHUMfbSl76Ue+65hzvuuIN/+7d/Y8mSJYyPj/Mv//IvrF27lmuuuSYMpu655x7279/Pa17zGl7xilfwgx/8gI0bN/LP//zP3Hrrraxfv57LLruMX//615imyVlnncWll17Kb37zG0zTDCvl3vnOd3Leeefxtre97TjvvRBCCCFOdBJMiZNWbYjkui62bYeXIIAKwibX9QA/oDJNE9M0sSyr7r6O49QFUrNNy1RVlWQyRSwWxbIsisUiqqrS0NAA+P9SrShKuC1d19F1nYaGBhzHQVEUbNvG9VzMiolt2xQKeQqFPJZlUS6XqVQq4Tg0TSMajRKLxUgkEuHXyWSSZDJJIpEgFosRiUTQdX3G6aZCCPF87dixg1tuuYXPfOYzXHDBBbS1tTE+Pk6lUmH+/Pl4nodlWeHP0N/85jd89rOfZc2aNXR1dTE+Ps7TTz/N6Ogo+Xye7du3H+9dOu1s3LiRD37wg8TjcRobGzlw4ADz58/nPe95D2eddRb33ntveN9rr72WNWvWkMvlsG0bz/N40YtexI033sh5553HnXfeyebNm5mYmGDr1q08+uijXHnlleH3jz32GK94xSu4+eabWbRo0ZSKYCGEEEKI6RRvDk1wstks6XSa/v5+Ghsbj8W4jpne3l5+/OMf85GPfOR4D+WU5Hke//mf/8k73vGOP+mX0+Bt6jgOtm1TqZhYlh8wBcGU53nYto1pmlQqFUzTpFQqUSwWMU2zWh2lhNsbGxujubmZaDRKJpMJQ5/gdr+iysHzwHWdcCxBtVNtUFV7qQ2ypl7nui6RSCQcSywWQ1VVcrkcyWQyrJDSdZ1UKoVlWeRyOUZHR8lkMhSLRSqVSl3Fla7rRKN+pVUymaSxsZF0Ok1DQwOJRCKstgrGK0HVqS+bzdLV1UUmkznlfmaLP13wd/pcGIbB6tWrefLJJ0mn0yxZsoRdu3bxohe9iJGREQqFAqVSCdM0yWQyLFu2jImJCd74xjdi2zaXXHIJ73vf+zAMg3Xr1jEwMMCWLVuk/94xsnr1anp7e8NpdkuWLOGPf/wjlmXxta99ja9+9auYpsmGDRuwbRvw+0Ode+65pNNp/vCHP3D22WdzzTXXcO+997J37162b99OOp1m/fr1jIyMsHnzZhoaGli/fj2jo6Ns2rSJxsZGLr74Yvr7+zFNk2eeeeY4H4lTh/xcF0IIcSqSYEqCqaPq+QRTtSGUaZphBZFlWXXT8YLqomw2Sz6fp1IxsW0rnG4XBDyapoXbDR5r2zYTExOkUikURaFQKBCNRutCm2A7QF3QFFwfBE610+tqg6/a57SsyXEFUwmD5w72NZVKoes6nuexa9cumpqaOP/885k3bx6pVArP8zBNk2KxSDabJZPJkMlkGB8fp1wuh8cHCHtYxWJ+0+F0Ok1TUxNNTU2kUilisZiEVacwCabEoRxJMPV8RKNR1q5dS1NTE0888QSDg4NH7bnE82MYBueffz6PP/74lL6I07W3t9Pe3s62bduO0ejEocjPdSGEEKcimconjqvaAMe2bUqlUvVSxratsB9TpVIhm82Sy+UoFouUSiUURUHXdQzDqIZQGo5tE4vHcF0X0zTJ5/N4rofHZG8owzCIxWIsWrSIaDQa9nkKKouCS234FFw0VavrRR5M2Zttn4L9GhoaYv/+/WGFl6Io/tg8j2w2i+u6GBGDhsYGUim/0qlQKLB9+3b27NlDMpmks7OTjo4OWlpaaG1tDUM6y7IolUoUCgVGR0fJ5XLk83kKhQKFQoFcLsfg4GB4vIIVspqammltbQnDquBY1O6bEEIcqUqlwiOPPHK8hyEOIaiSmovh4WGGh4eP8oiEEEIIcTqTYEocc0FwEwQqQdjk97JwMU0rDFQmJiYolUqA31w8FothmiaO49Da2gZMBlpBNZSZM1EUhcbGRnp6emhoaAgbiE9tIj7TuKauzlfbd6q2/9RcRaNRenp6GBoaCvdF13XK5TLlchnw//XaNE2GhoaoVCoYhoFt24yNjdHQ0FANncaIRnfQ2trKggULaGlpQdf1sEl6Q0MDiXiCfMEPvDRNw3VdisUiExMTjI+PUygUKBaL5PN5hoaGwqmAiUSCxsZGWlpaaGtro7GxkXg8HlabSUglhBBiqmg0SqVSOd7DEEIIIcRJToIpccw4jkO5XCGbzZDNZsNeSUGwNDo6SmYiQ8WsoChKWNmUTqfDJuXghzhGxKBcLvlT6/DqprA1NjayatUqmpqawmAFmLGyaaZV+GqDqdrL1MceaVhj2zaWZaHrOpVKBcdx6OnpYdWqVWEQValUKJVKjIyMMDg4yP79+znjjDNoaWlB0zRMs0I+n+fAgT6am5tYuHAhbW1tGIaBqqqkm9I0ptOYZoVioUjFrISBU7DaXy6XY2xsjNHRUcbHxymVSkxMTDAxMUFvb2/YbL2xsZG2tjba29tpbm4Op/+BBFVCnC5isRgvfvGLMQyDcrnMQw89xIIFC1izZg3PPvssu3btCu+7evVqBgYGGB8fB2D+/Pm0t7ezdevWOT+frussX76c/fv3c/HFF4ernW7atIn29nYWLlxILpdjy5YtYW++M888kz/+
8Y9zfo5IJEJDQwNjY2Phcy5btowdO3YcdlrbyaS9vZ21a9eyf/9+duzYwerVq8nlcliWxZo1awAYHR3l8ccfB2Dp0qWoqsrOnTvntP0VK1ZwxhlnUCwWeeCBB8K/o4UQQgghjpQEU+KoqO0TVSqV2LdvH/l8HtO0wqblxWIR13GxHRtFUchkMugRHSNq1PVmsiwrnKbW0NBAZ2cnekTHdT08z8WxJ38ZNgyDs88+m1QqBTDjScbU6qfa0OlwoVRtIDOncMYDFMjlcmSzWTzXQ8Ffla+xsZFzzjkHwzDCscfjcdLpNO3t7TQ0NJBMJtm7dy+jo6N0zu8kmUqiqiqu6zIyMsLY2BiNjY10d3fT3t6OYRj+lENNozHdGB5Dx3GwHRtN1Uin03R0dKBpWt2xHRkZqZsKmMvl6OvrQ1VV4vE4TU1NtLe309bWRlNTE/F4PJzqOBMJr4Q4+bmuSyaTYd26dbS2tvL000/zvve9j5///Odcf/31fOtb36K3t5dVq1bxuc99jv/zf/5PGEx1dnayevVqnnnmGVpbWymXy+ECDf39/SxcuDD8uwH8nxlvfetbGR8fJ5/P86pXvYqf/OQn4ZToP/uzP2PLli20t7fz4Q9/mH/4h3/gyiuv5LrrruPjH/94XS8rRVHo6ekBYP/+/cRiMRYvXszg4CDd3d382Z/9GV//+tcZGhrC8zwuvPBCVqxYwd13333sD/JRoGkab3jDG3jmmWe44YYb+O53v8vll19ONpvlnnvuIZPJsGLFCs4999wwmFqyZAmGYdDb20tjo//3RzKZJBaLMTw8zIIFC9i1axflchlVVbniiiv4+c9/zl/8xV8wMDBAsVhk//79x3nPhRBCCHEykmBKvKCC6W/FYpGxsTEymQxDQ0MMDQ1RLBbJZDKYphk2+VZVNWxi7nkenusRifr/mt3YmKahIUUkEmFkZIQnn3wyXEkvmUyiKOB5fq8Mw/DDrCVLlpBIJOpWzguqm2YKoqZ+P1MgdSRh1ExrCXiuR39/P5ZloagKiqqE0/uCaoCpVFVl4cKFdHV1hVVU2WyW3v295HI5DCOKYUTQNI2RkRHGx8dJpVL09PTQ3t5OJBIJG60HUyXBn3YRVD75vbliNDY20tTUxLJly3Ach0KhwPj4OAcPHmR4eJhMxq9wm5iYYO/evWiaRjwep6GhIexNZRhRYvEYiXictrY20ul0eMynHt+5HEchxInBNE36+/tZu3Yt//iP/0g0GsWyLHbs2MHVV1/N6tWrMU2Ta665ht/+9rfTHh+JRLj++uvZs2cPCxYsYPny5ViWRUdHB1u2bOF1r3sdn//85/E8j46ODnp6erj99tvp7OwMKzcrlQqFQgHHcdiyZQvlcpn169eTSqU455xz+MY3vsHLXvYyvve974XPe+WVV3LGGWeg6zpPPvkkK1euxDRNDhw4gOM4dHd3M3/+fIaGhnAchx/96Efceuut3HfffRQKhWN5iI8Kx3H493//dzRN44orriCTyXDfffdxwQUXhD/bL7vsMn70ox/VPS4Wi/He976XRx55hMsuuwxd12lpaUFRFHp7e3nJS17CV7/6VZYtW0Y2m2V4eJiOjg7WrVvHueeey7/927+xZ8+e47TXQgghhDhZSTAl/mRB+FCpVBgfHyebzYYVUQcPHqS3t5fHH3+cVCpFc3MzmqZVq6fMsPF4a0srLa0tYW8jXdfrVrnzV5nTwukkiUQC8AOOeDwermrX0dExrUpKQcFl9ukZMwUnwbaDiqfZgpSpodLUZuie51EsFhkYGMDzPHRdZ2hoiHnz5jF//vxDLpnueR65XI5yuUx/fz+Dg4Pouo6qqpTLJXLZLJFIhGjM759VqVSYmJigubmZnp6ecPpfMplE0zRGR0cZHh7Gtm0ikQjxeBzDMEgkEiQSCZLJZNgYvaGhgUWLFoVB1cjIKIODAxw8eJBMJhP2rAr2WdO0sAdYc3MLixb10N3dTVNTE5FIpDqN0e8hFjxGGq0LcXLIZDJ8/etf5z3veQ9///d/z6OPPspb3vIW4vE45XKZv/zLv+TZZ59l2bJlnHnmmWzZsiWc1vWqV72K/v5+br/9dt785jfzy1/+kmw2y8te9jJuv/12/uZv/gZd17Esi3Q6zejoaPgz3DRNcrlcuGhEPB7npptuIpPJcOedd3LeeefR3d3NGWecweWXX85dd91FNpsF4KUvfSlDQ0PYts2SJUvYtm0bV155JUNDQzzzzDNs3bqVLVu2hPtomiaFQoFkMnlKBFPgV+HecMMNbNiwgf7+flpbW8PbzjzzTCzLqguRgoq1bdu2sWXLFi655BJ+/vOf09bWxtKlS7n77rv5yEc+EoZdP/3pT/E8j+HhYe68805yuRzd3d0STAkhhBDiiEkwJZ63YHpFNptlfHwC06xgWRYTExP0HehjeGSYaDSK4zjEY3Fs22ZgYADDMEin0yxZsoSmpqawgqc2iJpasVQoFCiVSuG0N8dxUFUVx642PLdMli1bFk5zqws6lPqeUFP7Q029vu5PVakbV+2+T/1+tusOHjwYViw5jkMsFqOrq4toNFoXotU+RzB9ZtOmTQwPD5NMJkkkEkQiEVRVpVQqhQ3dXdelVPL7bRmGwdDQEKOjo8ybN49FixaRSvlVZ/PnzycWi7F//37GxsaoVCokk0mam5sxDINIJEIymSSdToePUVU1DKqWLFmMbdvkcjmGhobo6+vj4MGDZLNZyuVy2Mh+dHSUvXv3kEgkmD9/PkuXLmXBggXVxvV+dZxt2wDhKoFBU/qpx0EIcXzNmzePN77xjWzbti38udvQ0MCOHTvo7Oxky5YtjI6OEo/HWbBgQVj9GnjkkUc4ePAgr3zlK8Mp2q7rYllWWGEbGBoaYv78+USjUYDwZ5phGDQ1NVEsFvnud78bhvSf+tSn+OpXv0o+n6dYLPKSl7wknIq3efNmkskkg4OD7Nu3j46ODh588EFe8YpX8PTTT9PT08PixYtZsGABTz/9NK7rkkgkyGQyx/YAHyWxWIzPfvazZDIZnn32WRobG8PXRVVV/uzP/ozbb7992t9b99xzT9hXLFj51XGc6uIk/t9ptdVSqqriOE742srPbyGEEEI8H4p3qJKNqmw2Szqdpr+/n8bGxmMxrmOmt7eXH//4x3zkIx853kM5KQRvl3K5zNjYGPl8Ppy6NzIygqIodHZ2MjExwc6dO7Esi0cffZRLL72UefPm0dExj3S6kUgkUrc9OPSUuWeeeYZnn32WFStWUKn4AVg8Hg+fOxqNsnbt2rDHUu02juQX5ZkqoKYGUzNN/Zvta9u22bhxI/39/WGDclVVWbduHel0etpzm6ZJNptlYGCAAwcOoCoqDY0NYS+nIITyPI9oNBpWlxWLRSqVSlhZEIlEwibmC7sX0rWgKzw2lmUxMDDAk08+ycDAAIlEgpUrV9LY2BiGUYlEks7O+XVjnnoBwiq5vr4+Dhw4wPDwMKVSaVoz+mAlwa6uLpqamrFtm0IhT6lUwrJsVFUhFouFfbWCAPJIXz/hy2azdHV1kclkTrm
f2eJPF/ydPheKorBq1Sp6enrYsmULg4ODrFixgsWLF7N582aGhobC+3Z2dlIoFMKqpcbGRlKpFMPDwyxbtoxcLkcul8O2bVpbWzlw4ADLli1j165d4c/Oq6++mvnz5/OTn/yECy64IJz2/cwzz5BKpThw4EA4HXzx4sVho+4g8N+9ezfgh97nnXcehmGwadMmmpqaWLVqFbt376a3t5cLLriA8fFxFEVheHiYt7/97Tz++ONH1ET9RBaNRlm3bh2apoXN4z3Po7GxkYMHD7J8+XKeffbZur/PWltbw38I6u7uplKpMDw8HPY/HB4eZunSpVx66aXcfffdDA8PA34T9F27dtHW1oZlWWE1rTg65Oe6EEKIU5EEUxJMzUkQtPgVMWNYlomiKExMTJDP52loaKC9vZ1YLEYikWBgYIA//OEPNDc3s3nzZt7+9reHfaBc18V13VlXvJsaUBUKBe677/e0tbayfMVyent7MU2T0dFRmpub0XWdnp4eFi1aVPf4QwVdRzI171AVU1P3YerXY2NjbNiwgUqlQiwWw/M8Fi5cyKpVq8KqrKDqbGBggInxCTRdw8ND13Ti8Xi4Lb8iKY/rOuEUPcMwME2Tcrlctz3TNAF/Koeu66TTaRYvXkxzc3MYKmUyGe699162bt1KR0cHa9asYd68ebiui67rdHR0hFMvg2BsajhVe5vruuTz+bBCoa+vj0wmg23b4XhbW1vp6JhHV1dnOK0kl8uRy+epVCupdF0PVwRMp9Poun7I10xMJ8GUOJQjCaaONUVRaG5uDlfMOxZUVSWdTkugMketra2Mjo4e72GctuTnuhBCiFORTOUTswoClmBa2djYGK7r4bpO+C/N8+bN44wzzqiGHQqe5wdO8+bN45WvfCW2bbNnz55wagYQBiNB+f9M2WgQQriuy969e1FVhZ27dtLc0hxOK8hkMrS0tIRTxmoDpLmEU+F1HnjM3JQ7CF1mCqZmCqdqr3NdN+xxElQiBf9i7ThO2FB8YmKCYrGIqqpEY1Ha2tqwbZt8Ph9Ob3Ndl2g06q9k6DphJVO5XKZSqdRNX9Q0jYaGBgBKpRLlcjl8Dbu7u+np6cEwDBobG1m7di1bt25leHiYTZs2sXr1alpbW0k3phkaGqJSqdDW1lY3za4uoFJUVG0yqEokEixfvpzly5dTLpcZGRlh79697Nu3L1xFcN++fTUVVAtobW2lvS1GJpMlk8mEzdf37t1HKpWks7OT+fPn1/UVE0KcmoJA/1hyXVdCqSMgoZQQQgghXmgSTIkZBf0ixscnGB8fC1fPm5iYCJfdTqVSdf2ZgulbQUBT28B8pr5OQUA1tVl57X3y+Tz79++nu7ub1tZWSqUS4+PjNKQaaGtrw3XdcDnrYHuHCqiONNQ4VMXU1FX+poZTQVUXEDYGD4K2ILAKjpuu69VG5EkA9uzZQ2dnZ93x0zQtrLoKGorXhlK1z53JZKhUTEyzQi6Xo7GhkZbWFnbv3s3Y2BgrVqwgnU7T1tbGueeey9NPP834+DibNm0ilUqxbNky2traKJX9XlZTQ6Hg9ZsWVFWrqDRVQ9M0urq6WLBgAevWrWNsbIy9e/eya9eusEdVY2Mj3d3dLF68mLa2NmLxeUyMTzAxMU6xWGR4eJjBgweJx+Is6F7AksWLaWhomPE1EUIIIYQQQghx8pFgStQJAqmJiQnGxsbCxqZBc9ulS5eSSCTCICRoZBtUAwVT9IKwJPg+2DZMhhu1q7JNvS3Y/r59+6oBxwJisSg7duxA13Xa2ttA8fuIBNPNpgZSf+pUvtpQ6nBT+aZe5zgOY2NjFAoFIpFI2Ny7s7OTUqlEqVSioaEBy7KoVCoYhkFDQwO2beO6LosXL0ZV1XAaXBAMBisTAmGvqan7FayaF1QwNTc3k0qlwjFZlkU2m2P58mXMnz+f8887n8bGRnbv3u1XDXjQ399PqVRiQVcXBw8epLu7e8ZjOPUy21Q/TdP8yqj2ds4//3zGx8fZs2cPO3fuZPv27Tz33HN0d3ezdOlS2ts7iMdjjI2NhdVkmYz/fty5YweLlyzhjBUrwqowCaiEEEIIIYQQ4uQlwZQAJiueMpkMo6Nj4ep2w8PDTExMUCgUsCyLXbt2ceGFF9LS0oJpmlTKFUrlErquk0qlwlV8gkCqtu9RbdPsmcKpWoqikM1m2bdvH4sXL6axsYGDBw+GvZP6+vpwXZempqawb9JcqqQOF2IcKnyZ7f7BPpfLZYaHh9m2bRu9vb20tbXjuk64GpJt2/T399PS0kJ/Xz9Lli5B13VisRiGYYRTFHVdr6s+K5fL4SVo+m6Z1oxhnm3bpFIpOjo6sCyLSCSCruvouk5/fz/PPfccLc0ttHe0s3XrVsbHx1m2bBmrVq2ioaGBwcFBbNsOl/weGRnFqR7nZDJ5yJ5gUwOqyaBKQ9Nqqqk0jaamJtauXct5553H2OgYO3ftZMdzO7j//vtpa2vjjDPOYN68ecTjCUZGRshkJiiVymSyWTZt2sTOHTtZs2YNK1YsJxqNSjglhBBCCCGEECcpCaZOc0HIkMvlalZX8hgeHiKfzzMxkWFiYpxSsYQRNejo6AD8/lCapqFHdHY+tZOurq5wlTzbtifDKXdyCWnTNP3H6HpdIFUXTikKCv70vn379hGJROjq6gL86Wm6rtPa2opt25TL5bDH1EyVUoermpqLmR7vOA6lUomxsTGGhobo7+8P77N9+3a2bt1KIpHgxS9+MYlEgra2NiKRCCMjI5Ph39gYE5kJLnnxJaiaGvZj6u/vZ3BwkEwmw5o1azjzzDPJZDIUi0U0TSMej6NpGq7rEolE/ADLAxSwbRvbtmlsbAwDwSDgKhaL7Nu3j3g8zkRmgkKxQGtrK5VKhUKhwMqVK1m+fDkdHR3VaYAVv/oqmwnHHovFwvfM1CqxmY5Z7VS/oKKtNpwKQ6rmJtatW8f555/P0NAQzz77LE8++STxeJzly5dXm+pHGRkZIZfzKFf8FSEffPBBdu3aybp166b1GBNCCCGEEEIIcXKQYOo05nkelUqFgwcPUqlUUFWV8fFxCoUCuVyObDZLpVIhEomw/JzlLFq0KKxOqVQq4TStc889F8dxqFQqYSPxYEqa67pYlkW5XA5XkLMsi1gsFjbUrhWEC/l8nv379rNo0SKampoYGxujXC4TiUTQNA3H8RuAt7e3h6FH7eODr2v/nPr1XI+RZVkUCgWGh4fp6+ujr6+P3t5ehoaG0HWdZDJFT89C8vk8W7du9Xs6NTYSjUZRVZV4PE4ymaRYLIbH7owVK9AjOqVyiUgkwnPPPccDDzxANpsNe08Vi0UWL14cjiEejxOLxXAch/nz52OaJtlsFtM0w+NtGAbRaJRKpYJlWWH/qvHxcWJRP1hyXZdyucxzzz3H8uXLUVUV0zRZsWIFjY2NNDY2UiwWSSaT/nLieGQyGTo6OohEIjP2E5v69dTjXzvNLwiqZgqpOjo6mDdvHuVymd7eXnbv3s2BA30sXryIzs4udH2YiYlxCk4R0zLZs2cvg4MHWbt2LeecczaGYU
g4JYQQQgghhBAnEQmmTkNBsDA2NkYmk0FVVYqFIrl8LuxBVC6XUVWVpUuX0tPTQywWq2t07jgOjuOE2wymkQXXKYqK67gUS0Ucxw2noUWjURzHIZPJkEgk6qZh1QYKvb29aLrGgu4FqKrKyMiIX01Vvdi2TSwWI51Oz6nZ+VzDimBK3vj4OAcPHuTAgQMMDAwwOjoahnexWIxkMskFF1xAS0sL8XicSMRgeHiIBx54gP7+flatWhVWhmmaRi6XCxucT0xMoGp+QOM4Lp5nsm3bNjzPIxaLhcFU0FcqHo+TzWbJ5/OMj4/T2dlJpVKhsbGRhoYGent7qVQq6LoeNkUPVuOzLCscP0AqlSIRT4TbLZfLZLNZNE3j2WefZcWKFaRSKSYmJvA8j/POO49sNusHauVKXQhY2+w9mLo5UzXVTFP9aoOq2pBK13X/T01n6ZKlLFmyhImJDAcO7Aegs3M+kYiOooySz+coOy65XI7777+fwcEBXvKSl4QVY0IIIYQQQgghTnwSTJ1mPM+jUCgwOjrqV0yVK0xkJpiYmGD37t0kEgni8TgLFixg4cKF4fStoJl5UPFSWxnjOA6O7YRBim3bAGiaRj6fZ8WK5eE0PtM0iUajaJrmV/HEYjQ2NtYFHsVikT179tDR0YGu65RKJQqFAmo10AjCryAUOlS11NSva78PKpHy+TxjY2MMDg7S39/P0NAQY2Nj4Up46XSaJUuW0NzcHPaDCvY16KOl6xpjY2M8/fTT6LpOe3t7GGIFVVBBKOc4DrquY1s2sZg/Bc80TWKxGNFoFNM0MU2zbopiMG2vVCrxxz/+kWQySXd3N/F4HNM0/X1HYc+ePTz99NMsXLgQy7JQVAU8/zXKZXPs2bOHVCpFd3c3TU1NZLNZyuUyxWKRrq4uDhw4QGtrK8lkEtM0icfj4RRNXdfD4Kn2PTA1nJoppJpKURRURUVRZ1jRr3oJemMlkwlWr16Dbdvkcjk6Ojr894uq4HlZXNfFtEy2bt3K2NgYr3rVq2hvb5dwSgghhBBCCCFOAhJMnUZs22ZsbJxSqYhpmoyPj1Mul8nlcuRyOdLpNJ2dnSxcuJBkMgmA63nggapOhjlBAFTb4Ly2h5TjOGG40txUXRHOdsJKoGB6oGEY5HI5CoUCbW1txONxwG9srigKS5YsIZ1Os2fPHhzHIRKJhM+j6zotzS11zdRnCiJqr3NdF9M0yeVyDA8PV4OoASYmxjFNs1qxFKexsZGFCxeSTqeJxWJhhRgQ/hlUJwWrEQYrF86bN4/W1lba2tpQVTWcwhd8XSqVwkoqG78hfO20SFVVMYwo5XKprkotmP7W09PD4sWLKRaL4XEOqqv6+vrYuHFj2ETd8zxUVDz8242oQTweJ51OU6lUKJVKmKZJoVAgk8mQz+fDqZbd3d00NDTUBUy2NdnAPggqa4OnIJSaLaDyL8HrMvn61FZQaWr1OGjqZPVUNaDyQyr/fdna2gooeG7wnA4Vx2Hfvv389Gc/4/Wvex3z5s2TcEoIIYQQQgghTnASTJ0GPM+jVCoxMTFRDafGKBQKlEqlMJxIJpOsXr2a5ubm8DGBIIDwAKUaRNVeD6DpGoZihH2UgkqnIETSdA2rbIVN0YNpcYl4AtMy2b9/P+3t7SSTSfbs2UNnZydtbW04jsP4+Pi0Si3DMEg3pWcMpILvgyltExMTjI+PMzo6yujoKPl8nkKhgOM4RKMx2traaGlpoaGhgUgkEj426NGkqirRaJSIHgGFMHAJKp0CixYt4h3veAfDw8NEo9EwTKlUKmGVWNAbq1gshvti2zaKooT9syKRCJGITjweD4O/qf24kslkOA7wG8OPj4/T0tJCR0cH7e3tRCIRYrFYdaphJBxz8PoqioJhGKRSKSzL8l8nVcOIGkSjsRl7d9X2iQrGXNvwPmjAHjxP0PuqdqXG2korBQWU6dufrXpK13TUaojX0tqC49hhFZptOzg49Pf187O77uJNb3wjra2tEk4JIYQQQgghxAlMgqlTXLAKXKlUIpfLhSu89fb2kkql8DyPnp4e5s2bh6ZpYQAVrI4X8ABqA4WaldeCsEHTNJLJJPlcPgxgCoUCpmmiqiqRSATLssLAx3Vd8vk80WiUbDZLoVBA0zRs2w77WvX29obhT1ClpSgK6XS6bhqf50Gl4vdL8lfLG2ZkZJhsNotlWYCCqipEo1Gam5tZuLCHVCoZBlHBGIMgKplM1q1s57oujuuEY3Ach1wuFwZMyWSyWrmk0draGoZQlmWhKGoYLNVOe4vH46iqOrnCof7/s/fncZLd9Xkv/j5b7XtV79usmlWDRgKEWCQkAV6EBIgds1g4EC62f46dODexc2/ycpzrm4Q4BCeB+MK9jg02BmxsI7AECIwk0Eia0Tqataf3vbv27Zw62++Pc863q0cCBEESy3n0avUyXVVnq+o67/M8z0cVICkS0bBtD+D1F3oHt/ecVREURUVRZNLpNBMTE+K+g6mFwfZutztYlukDJM9h5P1uAIq23UyBtnmO5H8dwCMPIGWzOV72smuJRKLIwdQ9RUHTIuL+guWxHRu7D1oFH6ZpYlu2KFN/pv6pfjjV/1mWZQqFIrquYxiGKIG3bIulxSX+/u//nttvv31HJDJUqFChQoUKFSpUqFChQv14KQRTP8Xq9XoCSm1tbdHrmbTbLTEhL5FIsH//fi+u5jieI6oPSLneiD0ALj+tv9xRFQAbRVHIZDMCNEWjUdrtNnpXp9vtikl1uq57Dho/iqXrOtFoFIB8Pi8KztfX13c8Rn/vk+eGqrO2tsrKyiobG+u+K8wmHo+RSCTIZnMMDaVJpVI7QFAA14JJdgGg8lw9+L/jbYX+9XMcF9u2KJfLLCwsMDw8TCqVQtd1bNtma3OT4ZFhAVNc1yUS0UQMMXCSBaAOPFdRJpNh165dAmBd7lTKZnMoiiymHHY6XXS9K0rOgwieV6bu7Ng3AVCSpP7v+2OOz/z9M+/5bZnmJltbZYaHhwWssx0bx3aEi6n/w3FsbMcFf9t7HVxxAbqCSGTgvgrAHrBdjq547jtFUYhEIuTzBQYGBjCMnoBTuN5+PHfuPA88cIIbb3z1M06ADBUq1PfXFVdcwdDQ0A98u2azyeOPP/5de+ZCPbcaHBzkwIEDP/DtbNvm1KlT3pTdUKFChQoVKlSo50khmPopVbvdFlPcms2m6BLSdR2AdDq9o0Op3yElpqkh4XpJq+2Ti8ucVIEkWUbpi9pls1lUVaVcLqNpGrZt0+l00HWdWDQmSsxt28bpeUCr2+2SyWRIpVKcO3eO5eVlms2miJQFrhrHcXjggQeoVCo0m01M0yKeiFPI59m3bx/5fJ50Ou07oFxc1xEwKnDiuI6LaXluoiBWKJw5iuqXc0s47s6epMCBVqvVRIm5aVrU63Usy+LCxQtM7poCEP1XXjRP2+G2Ag8ctlotTNNkz549xONxMaXQc0vpYhpfsO2CSJy3K6Rn/OiP2j0TmHr6z7f/XUCppx0PEtuuqm13VaVSZnBwkADgKbLM044Q/1sXF8mxsXz4Z
FoWluU5pmzHEcBK1VSikSjRaJRIJEIk0t/l5WDb2/FAWZbI5/PevnEdLNsSBfyWZXHixAkOHjzI2Nho6JoKFeqH0MWLF7l48eIPddsQSr1w2tjYYHNz84e6bbjfQoUKFSpUqFDPt0Iw9SOUADqXlUIH6j8x/n6/+0y3eTaybZtGs0mr2aRcLgv4sbW1heu6DA8Ps2vXLgEv+h9jxzJ5PxCIQZJ8QuX/pP/n/fcTxOFc1yWVShGJRFhbWwMgFovRbDap6TUkSSKRSNDpdMR0O8Mw6HQ6JJNJMpkM1WpVuKyCYnRZlqnXG5w6dQpZlnnlK19JsVgkkUh4heJ93VBB7ExVo5imKabPecXgoKpeHExV1e3ybX/amyhVd7x1C0BIcN+appHL5anVanS7XVzXRdM09u3bRzQSEe6xYP92Oh06nQ6bm5tezFGL4Pql8tFolE6nw+rqKq1Wi25XF+Xl2zBJRpYl0TfVD5/6e5n6i8I1TRNf90figtv230cAnp7pOHVd0CIaig+HQCIS0QCJVCqFpqlYlo3jmDiOLeKB3rLKO0xXAlr5x5MU/MyysW0PVumGTt1u4NgOjuvtR1VRSSTipNNpMpkM6UyaZCKJqnrrGHRJyZJXiG47HsBrNBqcPHWSkZHXh66pUKF+CIWQ4idX4b4LFSpUqFChQv2kKART30M/7Ju673a7Z/r593uMZ7MMAUgwDINmsymcRIZh0G63MQyDWCzGnj17KJVKO9xH/Y8jABXfJcDV75bq+3pHGfplN4lGo0xMTLC+ts7G5gbxeJxms4mu6+i6TiKRIBqN0mq1hKtJ1z2nUC6XQ1EUKpUKpmmSz+dRFIX19TVqtRp79uwhn88jy7KYCOg5bCKiY6nX6wlnVrAt+4vUg68DiNRf7j09Pc2jjz3Gja9+NbFYjE6nIyKKnU5HbNdUKkWhUODMmTMCkNi2TavpAcGgJDwSiQgItbm1Sb3eoNvt7HBzbS+DhCwHEEn2AZtGJKIRjUbFh6ZF0DR1R1F5sL79scDgfr1eqstKxRUVAojEdpdXEK0zTZNarUYmk/Fjc4boEMtkMjg+QPNu73qRPdvCNLfdSy5e0Xk/WFNkGVmSkWQZSbKQZEkcVxI2NhLY4Dg2uqXT6XbY2NjEcR1AIqJ5U/pyuRyFQoFkMsH4+DiyosACVKtVjF6PC+cv0Hp1m0wmHbqmQoUKFSpUqFChQoUKFerHTCGY8vWTemUxACytVotms8nW1hamaYr4XrfbJZfLsXv3bhHb2wGhvodLCln4WXY83mULIKJ+/fCnPyKmqiqjY6PE4jFmZ2eJRCK4rku71aaiV4hGoySTSRqNBr1eT0zta7VaZDIZSqUS5XKZxcVFCoUCq6trqKoqpu4NDQ0Jx1W9XhdRueBzEMELFKy/LG/DKMdxeOqpp5iammJwcBDDMPjKV77CwsICV+zfDy6Ylkk8HiefzzM0NCQgVhAH7PV6RKNRarUa8XgcNaYSjUZpNpusra3RaDQwDAPHcYX7yXNpqaJnKZjKF41GSSQSJBIJvwx9Gyj1F6AHvx+JRIjFYuLrfsfU5ZP0+vfj5cd9/8S8/n9bWVmhUqlQKBTEthXQyXe8bXdJOT7YcpBlD3hZloXRM7ztZJo4ti32wY5ic0XBlBVMy0SSbI+Q2gCSR0xlvF4y26Hb1Wm3O6ytr4PrTYaMx2I+sNOIRKOipL5cLpPJpH+AZ1aoUKFChQoVKlSoUKFChXo+9DMDpn5SwdPlCsqh+6e51et1Go2GgFGtVkv0Ee3atYvh4eEdkcEdEEqSRMm5l9ST+sexPaNDKvge/z4CEBHcd/B5JwiRyWazDA8P89RTZ4hENOIJzz3VaDSQZZlUKgV4pbkB5NrY2BCT9AzDYGlpibHRUSYnJ0jEE1iWRbPZ9BdlezqgZVo7gJTkR7wkeeeswWCbdLtdHn30UVRVxdANWu0WV199Nddeey2Dg4MCnkSjUVHWHkw5DMrk9+zZg6IotFotVldXqdVqGIaxo9vKi9spKMo2VEokEqRSKVKpFLFYTEAoAEX2QVUsKib2BQAqAFWXw6bA7RTsl6BMPPhsmiaO62D2TDqdjoBrAUTUVI1YPEY+nxedV8Hn4PEC11NwHFqW5XWS9RWYBxMBvVL2bdeapqq4ioLjOKyurrK2tsZ1111HOp2h1WqKCKMly0im53Cz+yCV67q4soskCtDAcR1sw0bXde/fnWDfe31drVbzh3vChQoVKlSoUKFChQoVKlSo51Q/tWDq2YKonxRg5bouvV6PTqdLPB5DURTa7TbdbldM3ltYWCCRSGIYOvF4nL1795JKpXYUlwet1duNQr4naqct6hkh1A6k01cG7v3K0904ATRaXVtjfm6ehYUFTLMnJuTFYnFSqSS2bdPtdul0OiQSCdLpNPV6nV6vJ9Y7cE8NDg5SLpdpt9tomibiaQGAEVCmbzJd0DfVP2Uw+H3DMJAkCV3Xufnmm8nn8ySTSYp2kcGhIRRZ4SrtfAsAAQAASURBVP7772NkZIRdu3ZRLpfZ2NhA171tnEwmiUQilMtl5ufnabfafi+SLOBT8KFpGrFYjHQ6TTrtTQqMRqPC1aSpGqqmEo/Hve0TjaH6MT3hePL3VQCYgml8hmFQLBZZX19nc3OTrc0tbMcWsbz+z8F2ClxekiTRarco5ArYjk0ul8NxHJLJJMePH/e6wrQIsF3oHkCswG0WQC/N1gSYCj6C5dN1XcQgA8dbp9NhaGiIq6++hmg0gmmatFptyuUya+trVMoVjCCmqWnYioIpmWB6zqlgm4j9GxyfuGzTVnCcn4zneahQoUKFChUqVKhQoUL9rOmnFkz9IPpxh1Ou62JZFhsbG2xsbKKqCoODg/5JfEuc7EejUXTdi7ZNTk6iadqOdZMAtx9OPVPfzuVQyv+ZuL0Po/r7mgIFsamNjQ2WlpZYXl5hc3OTWq2KJHnT00ZHR0kmk1QqFWZmZhgaGqJUKoli9Gq1iqIoZDIZZFmm0WiIx9vc3KRer1MoFIhEImxtbWEYBqlUCtM00TQNjz09PU4YuIeC8nJFUUgmk6RSKRKJhHdbPJjWaDQAiMfjKIrCVVddxdraGk8++STJZJJkMomqqqyvb3D+/HkxVlsAKEXb4YbK5XLk83lyuZyI17mui6qqohMrcED1O6uC5el2u6LYvVwu+5MATcrlsoBjq6urXHfddQJOdbpe2XoAkILH7AeKQVl6tVolFovRarcEaHIch62tLS5evMg111yDFtF2FKYHMclnmggYbN/L44CqqiJLspgQ2W63icVi3HDDqz1A6ThoSGSyCqlUirGxMTqdDhsb6ywvr6AbOhElSjQS8QrncXFMRzxHAvDouv7n4HH9fREqVKgfTt8t+htIURTxmhDqx0fPtN8U3636/d73RCIRer3ec7p8oUKFChUqVKhQgX7iwdSPO1T6UWlra4vZ2Tnyea/oudvt0uv10HVdjIR2XZe9e/cyODgoonsuOyf/CTjFdy84f6Z/cwHbcXZALcdxaDZbbG5usrS0yMrqKmurq3S7OplMmsHB
Qa666kVkMhnhntm3bx9DQ0PMz8/zyU9+ktXVVUZHR2m1WsTjcdrtNrqu0+12RbF1rVajZ/RwcTFNL34WFHFXq1UefPBBJiYmsG1bROIAESOzLAtAgKJoJOpNmfPjcrVajbW1NS5dusTMzCzr6+tksxle+tKXir6nVCqFJElsbGxw7tw54TQKAJQXgVOJxeNks1mKxSK5XE7E8gKgY1kWhm6QzWWJx+Oi78pxHH8inzeFUFEULMui1WoJ51gmk6FS8Tq5gr4wx3GIRCJIksQTTzzB9ddfz9TUFPV6nXK5jOu6oig9Eolsx+l8UKN3daIRr+NrcXFRxD5lfwJfo9EQv385fAq+vzyuF/RM9cf3gq91QxcdaMlkkkOHDrFnz25AQpZcvHSe5+eTJK88f2R0lIGBQZqtJmura5TLZTRNw3FcL7KJuwO60QepJCRisRixWPRH8CwMFepnT9lslt/7vd9DkiSWlpb4zGc+w/Lysvj3iYkJbr31Vj7+8Y//zPw9/kmQLMv89m//NhMTE1SrVf76r/+axx57jF/5lV/h61//OjMzM9/1tqlUin/zb/4Nv/M7vxPCqVChQoUKFSrU86KfODD1s/jGt9frsb6+wejoCPF4nE6nI6BB0KkTiUTYt28f2Wx2h1NFRPYui/Hh/+y7zSi73E3l2DaOH7krl8ssL68wNzfLwsIilmUxMFCiWCxy9OhRSqUSkUiERqPB0tISZ86cwTRNDhw4QKvVYs+ePSQSCa6++mrW19c5fvw48/PzzM3NoareIdlut6lWq2iaRiaTwVAN6o06ruNBh62tMo1GQ/Qgzc3NsbKywu7du7niiivo9XosLCwwMTFJIpHY4fbp6l2WV5ZZWVlldnaWra0tz5WkeD1Yhw4dZN++fd7EOcdhc3OT6elpdF0XDqQggqdpGqlUilKpxMDAgIjnJZNJsQ1lWeb8+fMAotQ8+PcAmi0vL/Ptb38by7IYGhpifHycRCKxA0r1dzfZtk3P8OBYpVIhFovRaDSYmZlhfHycTCbD0NAQvV5PgCRcD84FMcBEIkEmmxFON8uyfODj7IhjBu6u/hL1AOo5joOmaSIeGMCqflAUQMmNjQ0kSaJeryNJEsPDwxx70YtQVBVHFNUHrjwX2/bhluWtt6qo3tRDWWJ+foF6rYYsK8LhFbgAdhz/kkQunw8dU6FC/ZCKx+NMTU3xT//pP+X48eN85CMf4cMf/jCvf/3rkWWZkydPUqvVuO6669B1nbNnz/KmN72Je+65h7e85S24rstnPvMZ6vX6C70qP1OSJInjx4/zh3/4hyiKwr/5N/+Gf/pP/ynVapVer8eNN97Itddey9e+9jUURWH//v3s2bOHe++9l8cff5ypqSkikQhvf/vbmZiY4Mtf/jKZTIZarcalS5d4wxvewMmTJ3nTm97EpUuX+OIXv/i0ab+hQoUKFSpUqFDPVj8RYOpnEUb1y7ZtBgZKOI5Du93GcRyq1aqInaXTafbv3++5aIIbfRcQ1a+gBP27Rfccx8HQdSrVKqsrKywuLrG2toZhGMRiXhn2gQNXMDExIdxOq6urnDhxgmaziaqqlEolDh8+LGDJ5uYmjz/+OO12m0qlQq/XE2XeP/dzP8eTT57mwoXzAsB1u13hnioUCqJQHKDbtdB1nWQySd6HD51Oh7W1NaLRKNPTlxgbGxOwZGtri7Nnz4npf5ubm1i2TSIeZ2BggGKxwNjYGKlUilarxdmzZ0Wsrx9GRaNR0uk0Q0NDDAwMkMlkxCS84DNAJpMRXVkAU5NTRKKeuykWi2GaJrIso+u6cEZ1Oh3hKJqYmCASiYhta9s2tVpN/JvjOsTjccrlCqurq7RaTRRFodPpEIvFAChvlZEVmWQyKSb8qapKq9XCtm0//ukVuWcyGW86nqzguI5wPQVgqt/55bou8/PzpFIpksmkOJ6e6aM/lthut2m1WoyMjnLV8eOkUyls28F1wXFdXDdwWm33U/V6Bp1Ol1qtxla5THlri3q9QafrbauI5sUhXehzTAG4xGIxioWiAGmhQoX6wWUYhrh48Na3vpVdu3bx67/+63zqU59C0zRe/epX84lPfII77riDL3zhCxw5coS9e/fSarWIRqPccccdfPSjH32hV+NnTo7jsLy8zPLyMg8++CBXX301r3zlK5mdneUDH/gAd999N61WixtvvJGrrrqKT37yk/yrf/Wv+N//9/8d8F5Pa7UauVyOf/kv/yUf+9jHeP/738/f/M3fcOjQIYaHh8lkMqyvr//Mv08LFSpUqFChQv2v6ccWTIVvcjwFBdE9o4eiemBgbm5OwI5isciePXu8E3P/pBz6Jux9H11eaG6aJvV6nfV1r9dna2uLTqeD49hEIhFGRoYZGhoikUhgGAarq6s8+uij1Ot1ZFmmWCyyf/9+hoaHyGVzSJJEp9MR4KVYKqJ3dba2toSTKXAOlUolrr32pZhmj9nZWa/w2u8TqtfrdDodSqUS7Xaber0uXED1ep12u002myWTybCysoKmabzkJS8mkUggyzKdTodHH32MRqMhIEY+nyedTpHL5RgeHiYSibCx4fVG9Xo93xEVQdM84BTAqJGREdLpNJFIxI+JxVBVVYCcwDnU7XYF2FEUBVVTt0vMfdeR4zhimYaHh+l2u+zatQvXdVlfX6dQKIgC+Gq1ytZWmWTSm0YYuBMS8QSxWJRMJo2iKKytrZHL5TyIFouKnqvgGAlAk2madLtdms0mkUhETOdT0grYiHL1wCWmyIoATe12m9nZWWq1GqlUin379mHbNplM5mlQynVdMWlxaWmJXC7HlUePMj425i+Pg+PYfaXpJr2eB6Q8gFllfX2djc1NGvUGhmGI7ey4Lh2rQ6frdaxFtEhw8CNLMpl0BlVTse3wSn6oUD+sJElC0zQmJiaIxWJUq1VWVlb49Kc/zfj4OABPPfUUsViM97///XzsYx/jAx/4gDfEYG2Nubm5F3YFfoalqiqFQoFDhw7x9a9/nVe+8pVYlsUf/dEf8da3vpXR0VFqtRrnzp3jySefFDFrgIMHD3L77bfz9a9/nVQqxenTp3n/+9/P+973Pv7bf/tvbG5u8va3v50PfvCDPPbYY7RarRd4bUOFChUqVKhQP6n6sQNTIZDalq7r1Ot14YrKZrMCHK2urjIyMsLU1NSOoulnLKn9LpG9IGbVarXY2NhkZWWFzc0NGo0mlmUSjUYpFApMTk6QTqexbZutrS3On79AtVoBvP6RqakpRkZGyGazOxxAy0vL9Mwe3W4XgFgshmEYYnn37t3LhQsXxLIoikKv12NkeIREIsH8/Dybm5vEYjERo8tms4yMjLC0tES5XBZRxsABFY/HSafTGIbBpUuXqNVqDA4OMjs3j67romg8Ho9TLBYolTwn2tLSEltbZcAVXVSeWynBwECJiYkJisWiAFEBjAp6noJSdQF3+sqAA0CjKNtgp/9zUGA+MDCwY9JdOp1G13V0XceyLFKpFLLsxQ0ty/K6t3zH2Z49e3Bdl263K2BcPB4nlUzR1builykARaZpii6vdrt
NrVYjFovR6/XIZrNial/g/pJlGVmRxfFVr9fRdZ1KpUKz2RTTAYeHh5mcnBQuKUVRqFar6LrO+fPnkSSJ/fv3c+WxYwDYtoNl2Zim744yexiGQavVorxVZmV1lY31dRqNJkZvG0i5rosbTNrz86qdToee0vNK61UFWVZA8qKwi4uLjI2Nim0eKlSoZyfbtsnn83ziE5/AcRz+03/6T5TLZZaWloSrsVwu0+v1+PKXv8yb3/xmnnzySf7kT/6E97///SSTSe69994XejV+JqXrOv/3//1/Y5om3/jGN3jkkUf4xV/8RQBuvvlmNE0TMOnGG2/k8OHDLC4uir+9nU4HRVHYtWuXcDjfc889vOc97+H06dPcdtttjIyM0Gq1whhfqFChQoUKFep/SZL7LEhQo9Egm82ysrJCJpN5ThbkhQJSi4uLfOELX+A3f/M3X5DH/26q1+s0m006nQ6yLGMYBuPj4xiGwenTpymVSoyMjAg48j3VF+uzbZtuV2ervMXqyiqrq6vUalW6XQ/aZDIZCoU82WwWVVWp1+tsbGywvr6Obdskk0nGxsYYHh4WrpxOp0OtVhPLGhRlRyIRMVXuySefJBKJcPjwYSzLwrZtNE1jdnaO229/E6dPn+aVr3wlCwsLzM7OkkwmURSFra0tLly4QKfTJRbzSr+DSFipVKLRaFCtVne8KZZlmVQqRSQSodVqo+seGAt6mlRVZXBwUMRTArdXMCEvEomQzWYZHx9ndHSUdDotYFT/9L5Op0M+n6darTI/Py+m/IFXHhuAQsuyWFhYoFgssra2hizLHDt2DEVR6fUM0RkV7B+vw2uZeDzO5OQUZ848xebmJvv37yefz7OysiL2uyzLJBNJiqUijUaDzc1Njhw5AnjHdr1W58LFCywtLdFut2k2mzQaDVqtlnCxBftj9+7d3H777ZRKJX/qoLfP77jjDpLJJGfOnOHIkSNIksT8/DynTp1icXFRQKmBgQFKpRKKorBnzx5SqRS1Wo2NjQ1OnTqFJEkcOnSYW2+9lUQigWVbWH5xumFsAymvTH+JtbV1Gk0vdikcUo4f1XNdCIKoQWG6KE6HWNwHiNEYuXyOnmHwmtfcLJb/px1ONRoNRkdHqdfrz9lrdqifXAV/05+NJEkSnXi67g0vAK97KpgYGolE0HUdRVGIRqN0Oh0AkskksizTbDafs3UJ9d2VSCRQFEVciACv5zC4eJJMJmk2m7zrXe+i1WrxzW9+U7hbg/0b/C1zHIddu3bxz//5P+ev//qv+dKXvoQsy+ICShCxD/XcK3xdDxUqVKhQP416QR1ToTvq6XIch0qlQqfTwbIs0Y3UarWo1Wq4rsvw8LAAAIFTKnCoQP929awkhq5TrXqT51ZWVtjc2kTvdlEUhVjM61cqFArE43F6Ro/NLQ8MBBPihoaGuPbaaykUCkQiEQzdoF6vs7CwgOt6PT75fJ7BwcHtqW+qhm7ofmG2wvDwMPF4HE3TWFtbE31G6XRKTL5zHAdd18UJk6J4t8vn81y8eJGFhQW63a5wPVUqFQzDIB6Pi6u+wRvoWq3mOYQkD5QFHUPDw8P0ej3OnfO6prxtEBOxvIGBAaamphgcHBSdTMHUu8BFVKvV+Na3vsXevXs5fvy42A7RqDf5LZFIiH1VLBZFbM+2ber1Oq1Wi6NHjxKJyCIWt7a2JjqyTNPk6NGjJBIJ2u026XRaFLQXCgVUVSWfzwsIFji1AhhXq9UolUpIkoyiKszMzHD33XfvKAS/XMFJZ9AfpcgKiUSChYUFer0e6XRabEMR7fO/r1QqWJZFr9cjk8lQKg14x1KvR61W45FHHgHgqquu4qabbvaglGWKqKDXcdVifX2dxcVFVldXaTSbmL0elgBS28Xm3sA914dRroBREhLIIEmyiPu5rotTcZCAL3/5y0SjUfbv3/8jeraGCvXTL9d1nzGiFThhg9dt8MB6AKXAG2IR6oVT/74IFACk4O8kwOc//3lc190Bl4L927/vL168yG/91m+J2zmOE5bahwoVKlSoUKF+JHpBwFQIpJ5ZQTTKMAwkyYtzBVAgODkYHh6mWNwuc+6P7wXqj+ctryyztrpKs9kCXOLxOIV8nvyePSSTSVFuurq66nUNaRHyhTxHjx4VE+8Mw6DRaLC8vIxlWcJRNJodRVVUjJ7BV77yFSYmJti/fz+WZbG4uMgnPvEJjh07xste9jLxhlfXdS5evMjhw4cpFArouu5fdc0IZwwgIm2u65JOpzl27BjFYpGzZ8+KN9vdblcAi/5tEBxfjuOgaF58LthmFy9epFaroaoq8XicaDRKPB5neHiY3bt3UywWSSQSxGIxAf6Ck62mHyc7ceIEruty+PBhLly4gGXZFIsFotEosizT6/XY2trCcRxKpRKAAFPeh+fuCmJly8vLaJrG+Pg4sVgM27YxdINOp4OqqkxNTTE0NAR4UGh0dFREBoMJe7VaTXRcBVfH4/EY9XqNo0eP8q1vfUucaPSrf7uZpincU65/rOzfv1+sVxBFDH43nU5z+PBhUbgewMkDBw6g6zrr6+vcd999qKrKVVddxatffSPpTBrT7Akg1Wg0vQ6a+XlWV1ZoNlv+cWBhB0DK8SfteUTKX/C+dUDC9b+XXO9/wfOn3WrjOt7UytnZGT7/+c/zK7/yKwwNDf3Uu6ZChQoV6tkoAIvfT5ZlUa1Wn+OlCRUqVKhQoUL9LOp5AVMhiPr+qlarotBb0zQMo8f8/Dxra2vs2rULx3FIpVK+E2ZnFCm4Yl2uVFheXmZtbY1atYYkS2iqSjKZZHR0lFwujyRBs9mkWq2yuLCIpmlkc1n27t1LNpslEolgmiaNRoPV1VUMwyAajZJMJhkaGhLT5Hq9Hu12G8Mw6Bk9VldXyefzfuyuI1w7e/bsIZfLMTQ0JEDTTTfd5BVVRyIYhoGmaeTzOeES6wdNQTTQcRwxAe/SpUvMz8+jqRqKqlCv13dEGoPpeK7rks1mSaVSLCwssLW1hSzLJOIJUQo+MjLKnj27KZVKolcqeOzAJdXpdHAdl3Qmzfxpr3vjfe97n5hml8vlxHZRVZVqtSqAVgCfAtDmlY17k/4URUFVVY4ePYplWQACzgVz5YLuKkmScGyHdrctbhtszwB49UOcVqtFoVDANE1GRkYYGxvj0qVLO465y8GMF6nzooVBFFNVVbHsl0/la7Vaoq/L25Yj7N27F8uymJub49vf/jaapnHs2DFuuukm0ukMhm5gGDqNRoOVlVVmZmdYWV6h6ccKbcvGdmxcv9h8G0pBEN2TJJBcD0bJwi0lOs+31w/veaIbOvV6jbm5OVZXVpicmOAd73yniGWGChXqe+sZuwt/xPevKIp4HXw2UlV1x+9/v2UMXrd+VvVM2+eZtslzua+f6QLSs1Xwtye4eBUqVKhQoUKF+unScwamfpbfAP4gCiaV6bqOpmk+lDJEgfjY2Jjf46GQTqcFIOj1elQqFdbXN9jY2BBF4LIskUgkGB8fE7GvTqdDtVplbW0NgHQ6zejoKPkjeeHQaTQarK2t0fUjfl45eJ
F4PC6cW0EZe7BszWYTwzCQZZnjx4+Ty+W8qWxIrG+sMzo6ytDQkIjSSZJEr+e5ZYJIXxCVKxaLrKysiBOU4A1zv3MKvH6Mw4cPMzY2xqOPPkqr1RLT7SRJEnG6aDTK4OAgW1tlLl16HMdxiEa9jqpEPMHwyDD79+/fAaQAWq02ElAaKIlogxf1izI3N8ujjz7Ka17zGlRVZXV1lcHBQSKRCJ1OB03T2Nrcot1pI0mSKCEHBOCxbVvAr37YA4ioWhBHDMCWKEtXtn/PsbcnAAZgChBgJgBqQaH5gQMHmJmZ2RHn65+cFxS+B7ft//fg94P9EgC7oG9m165dDA970xo7nQ6PPfYYjz/2OCMjI1xx4Aquu+464vE4nU6Her3O8vIyly5dYnllhVazSc93ajl9k/b6t0WgILqHgFKXSdrxmz6Y1Wi1mly8eIF6vU5E03ji1CluueUW8oVC6JoKFer7KJlM8lu/9Vvib8CXvvQlZmZmfuD7efWrX83y8jIXL17kLW95C4888ggzMzPcdtttXLhwgRe/+MV8+tOfftbL9Eu/9Ev88R//MQCFQoFf//Vfp9PpsLGxwZe+9CUqlYr4/d27dzM0NMSJEyd+4OX+SVUqleJ973sf586d48yZM3zgAx9A13UeeeQRpqenueKKKygUCnz+858Xfz+OHTvGLbfcgqqq/MM//AP33Xff93yMfD7P7t27RVz7e2l8fJx3vvOdDA4OUq1W+epXv8rJkyef9fpceeWVlEol7rnnnmd9m1ChQoUKFSrUT46eEzAVQqnvL0mSaDQaTE9PI8sKuVwWRVHQdV1040xMTNDtdtE0TcSwLNvm3LnznD9/Hsuy0DQVx3H86XiTRKNRbNumVqsxPT2NYRgkEgmGh4fZt28f6XQagGazRblcFrG4oCdqZGQE8HoogjJqwzBotzu0Wy2MnoEkSaTTacbHx0VJOYCmRXAcL0qWNbKYlrnDgQSSNzmt501Ou3TpEpOTkzSbTVRVxTCMp8XyVFUVIAS2+5SGh4f5xV/8RT7/+c8Ll1nwu6OjowBcuHBBuHqSySTxeJzBwUH27dvH8PAwyWRSOKsqlQr33XcfZ86c4Vd/9VeFcyyAha1Wi4ceeoiJiQkOHz7ME088IW7vwTEVXW+yVd7ytoWqkSglRBxQVVUxAS8ejwvQE4AnWZZ3TPELnkOu6yJLct/PvW3gSu4Ot1SwbQLQFOzDZDJJt9vlwIEDPPHEEyKGmcvlyOfz5PN50um0KCkO+rtM00SWZRHj85ZXFfcdj8dRFIWhoSEOHjxIJBJhbW2N+++/n83NTQ4eOsiRI0c5dOgQAFtbZRYXF5menmZ5edkvXjexbC866NgOjvv0HinvqJF2OKG8n7H9M0kSP5Dw1l9VFdqtFqcvTVMul0mn0wwPD5NOpYj7XV75QuEHft6GCvWzpmg0iqqq/OEf/iGTk5P80i/9Ev/hP/wHXve615FOp7nrrruQZZmf//mfxzAMvvzlL3Pw4EFyuRzlcpnHH38c8FzBr3nNa1haWuKWW24hk8mwvr7Oi1/8Yp544gmOHj3K+9//fqanp7nvvvs4evQo11xzDU8++SSPPPLIju8vXrwo4s3g9frpus4nPvEJjhw5wq//+q/zkY98hJe//OXE43HOnDlDu93m5S9/OadPn8ZxHI4dO8bs7Cw333wzm5ub3HPPPT+QY+vHXb1ej4sXLzI2Nsbm5ibLy8v83d/9HfV6nWuuueZp02OHh4d5xzvewX/5L/8F0zQZGxsjFovxute9jmQyyV133cXAwAAHDhxgdHSUu+++m6NHj3L77bfzR3/0R6ysrPDa176WcrnMPffcw8te9jKSySRPPvkkS0tLrKys8PGPf5zf/d3f5f/9f/9fut0ut99+O6lUiq997WuiSP3ChQu84hWv4Dvf+Q5XX301x48f54knnkCWZV760peyb98+7r//fubm5rj11lsxDK9KYGxsjBtvvJG1tTXuu+8+rr32WuLxOPfddx833XQTiqLw93//9wD8/M//PK7r8vd///ccOXKEkZERSqUSX/ziF3cAzVChQoUKFSrU86cfGZgKYdSzkyRJmKbF4uICly5dIpPJMDLiuXN0XceyrB3RtCCuIMsyp0+f5pvf/AcKhTx79uzhwIErvKJqRaHb7bK6ukq73Rbl2AcOHKBYLAoXVqvVYmlpyYvPRSJk0mkGBwfF1L+g46pn9Gh32mJqG3jgamBwwAMYioLml1976+N1HK2trXNx+iIXzl9gdnYWSYIPf/jDTE5OUiwWicViZLMZdF3HdV2qlSp79uwR8bGgMylwSW3HGr1/7y96z2azImJoWZaAc6VSibW1NVZXV3Fdl2QyKaDb/v37GR8fJ5PJCIdUpVLhH/7hW9x5552srq7wL//lvxRgK4A8tm3zxBNPsL6+zq/8yq+wuLiIaZrEYjFazRaKquC6DltbW2I5g/XoB1O1Wk3s42Cf9vc2BbrcKRQAGC8SqGCajigED1xT/beNRqNimziOQ6FQYGBgQHSGBRG9AIAF99NqtajX60SjUaLR6I7l8MCULJZ1bGyM1772tWIC12OPPcbJkydJp9O89KUv5fDhIwwNDdFsNpifX+DChQusrHiRPbNvEqDtOCK2503bg/5pe953bh+ckvpAVN+2QUJVFBRFodGosbKyQqvVIhaLsW/fPsbHx0mlUtiWRS4WwwrjIKFCPWsNDw9z0003MTU1xfT0NK997WspFossLS3x3ve+l//5P/8nS0tLHD9+nBtuuIEDBw4QiUT4n//zf/La174WSZJ44IEHeOtb38qxY8e49957mZiY4Pjx45w7dw7TNEmlUnzzm9/kwx/+MPPz87zzne/kL//yL3n7299Oo9HY8f0nP/nJpy2jaZo0m00efPBBfvEXf5GhoSE+9KEP8Z/+03+iUCiwe/duarUar3zlK8Xr92te8xq+/vWv85KXvIRarcaDDz74Amzd50a9Xo+NjQ2Gh4fpdrvs3buXD3/4wzz66KN84xvfEH+jAu3fv1/8nQPvb+Mb3/hG0uk06+vrvPvd72ZpaYnh4WEeeugh3vWud/HlL3+Z8+fPMzMzw6/92q/xrW99i6uuuopOp8Nb3/pWvv3tbwvQ4zgO7XZb1AC86U1vYm1tjfPnz/OP//E/5p577mFoaIiZmRluvvlmVldXec1rXsOnP/1p4WxWFIV7772X97znPXzxi19kz5493HnnncTjcX75l3+ZP/mTPxEXWz70oQ/xB3/wB9x6662iduBtb3sbsiyzurpKIpHgtttuY3x8nKeeeoqNjQ1uueUW/uzP/uwF2V+hQoUKFSrUz7p+JGAqhFLPTo7jsLa2xvT0NJubmxSLRUZGRnAcx4Mylo1hGExPT4vOHlHujcSlmRkuXLjAvn37OH711cLZEsCJiYkJBgYG0DRNFKAvLy+LzqClpSWSyST79u0jGo3SbDbpdrvYtk3P6NFsNWm1Wti27UOkrABAEhKS7IGM4L7X19eZn19gbm6OlZUVOt0urWaTTrdDp90W0b0g4tfpdMhkM
uzbt4/HHnsMx3WIx+OiMDyIp8HOUvdg/YJ+iSA6NzMzQ6fTQZIkdu/ejSRJTE9PU683iEQ0YrEY6XSaXbt2sXfvXnK5nHAr1et1vv3tb/PlL3+F6elp9G6XW2+7lZe//OVYludYcx0Pgs3NzfGd73yHV7ziFdi2zfr6OqVSiV6vh6J6PVHBZLoAEgUj1YMYoqZpFItF8vk8lmXtcEsFcOhyUOQ4zo6on+cEUgX8CrbTNpiSRJQz2H5ewXuMaNRzhi0uLorYoaZqSPL2Ng56zgJ3WTACPlA/RAvinjMzM5w8eZJKpcL+/fvZu3cvu3fvwXFsTp8+zfnz5z1I1PYgpwektsGa6zieL0r0SAVrwjafkqSd5ihpuz9KQkL2+7A6nTbVqncSNDw8TCqVIhaLeds+nyelqvRsG8V2kMMIX6hQz1rdbpd6vc6BAwf41//6X/PmN7+ZZDJJo9Hg/Pnz3HrrrSLenc/nMU2Tu+++m62tLer1OpIk0e12mZmZ4Z3vfCcf+9jHuPHGG3nrW9/Kf/7P/xnXdZmZmWF2dpZKpUIul6NYLLJ7927OnDlDLBbb8f0zOZuCmPHAwACRSIR2u83Zs2d54IEHuPrqqwH4zne+w2/8xm/gui7/43/8D37hF36B8fFxNjY2fqqnyy0vL/Nv/+2/JRqN8tu//dvceeedOy5oAKytrfGqV70KTdOwbVs4qDVNo91uc/HiRWKxmHhdf/3rX+8NBWk2abfbDA8PMzY2xtbWFpVKhUqlwle+8pVnnAwI3mv0N7/5TdbX18WFkmDoSDQapVAosLKywtLSEuvr61xzzTWcPXuWixcvYts2586do1Qq8ba3vY3Pfe5zmKbJ3Nwci4uL5PN5Hn/8cU6ePMlNN91EvV6nXC4zOzvLq171KvEeZnFxkYGBAU6fPk00GuXmm29+PnZHqFChQoUKFeoZ9EODqRBGbeuZtsXlJZ/r6+s88cQTXJyeplatsm/fPnbt2kW73d6OqdkOtmWTzWYpFIoCSgXdROlUisnJCY4cOcK+vXuRZZn19XXabc/dFI/HhRsniAOCBzQWFhb427/9W6699lqKxSLNZpOvfOUr/NzP/ZyAG8lkkqmpKXFlsh+UdLtdyuUyKysrrG9s0Gh4EKtWq9Fpt+n1zD4Ti/dVIpFkZGSEWCxGpVLhzJkzfOc73+Fd73oXm5ubgNcVEkwM7Jle4fvjjz+ObdscPHiQG264QYCaSCQiCr2Xl5c5deoUiUSC0dFRtra2WFhYwLZtkskEiURCxMyGhoZIpVKifPzUqVPceeeXuXjxIu12C8PQ2btvL2984xv9AnAdRVHQNI1mo8m9997L4OAgV199NU888QSZTEbsk2g0iq4bdLtd4bAKtnvQreW6LrFYjJGREfbv3w+wI84XlKAHJ1au0+9SUlEU2Yttql6ssH96YX9RrQdtZFKpFJOTk8TjcVRVpd1uU6/X2bdvn4ByyWSSVqtFtVoV8CtwrTUaDVHYHkDN/oigZVnous7c3BwnTpxA0zSuvfZadu/eQzwe49Klac6dO8fKyqq4Qh4ckwJIuX2l5uLps+2UEi6pbbsY/f8FscN8Ps/4+BiJRIJHH32EdCYNPsiNxqLENY0kMAwMpFKs6jqrS0tiXUKFCvW9Zds2ly5d4t5778U0TW655Ra+9rWv8Y53vIPh4WFOnjxJLBZjdHSURCIhhkx0Oh1s2+ahhx4S93Xfffdx5ZVXsrCwwAMPPMChQ4dYXFykVCoJMFSv11lZWeHUqVOMjIzQ6XSYnp4W33sDJJrUarUdy7h//37++T//50QiET796U/T6XRYX18XXYEBRFlaWkJRFLa2tvjSl77EgQMHME1T/E36aVKv16PVarF//35uv/12er0eDz744NOgFMClS5eYmZnhd3/3dzEMg5mZGb761a/y1re+leHhYR588EExSddxHGq1GlX/vczVV1/Nl770JY4cOYJpmqL3MrhA069gguxXvvIV7rjjDjqdDg8++CBnz57ltttuo+BHrM+ePcv111/PP/kn/4RyuSzimMF9TE1NsXfvXkzTpFKpMD8/z2/+5m9iWRZ//dd/zcbGBgB33nknt956K6qqcvbsWf72b/9WxBMfe+wxGo2GcDA3m83ncG+EChUqVKhQob6XJPdZEKZGo0E2m2VlZUV0FP20aHFxkS984Qv85m/+5nf9nR8WwlmWxcbGJmfPnmFmZoZer0c+n6dQKHDs2DFs26bZbNLr9cRjBBAoiPIF0KDb7WKZFu1OW7iZgphZUGze7XaFY6q/uDpwaj322GPs37+fVCqFJEksLy9z6NAhSqXSjq6ooOy8VquxvLLC+to6lWpVLKcsSUiSF/+rVqsi8mdZJo1mk263Q7vVZmJinE996lP0ej1mZmaQZZkzZ87w2te+lgceeIBms8nNN9/M4OAg5XKZcrnM3/3d33kOJl3nuuuu4x3veAeNRoNutyv6qpaWljh9+jTDQ8PEE3Hm5+fZ2NhAVVUSiQSZTIa9e/eKiYCRSATLsrl0aZq77/4qjzzyiIjVGYaOpkX4jd/4/3H48GHhsgmgzqlTp/jOd77DG97wBqLRKLVajUwmQ6/XE3HAra0tdF33XF+Og207gMs111zD4OAgy8vLZDIZ0akyPDwsOrFs2xbxvmDbG4YhtnVEi2A7Nmtra2SzWSzTotPtiIhlu93GsiwGBgZIp9Pi2FBVVYBDD9Z5kcbAnRYUly8seI43SZKo1Wq0Wi0s0yKXz4nY3z/+x/+YVCrF/Pw8pVJJlOXH4962Dzq/FhcXOXfunIiUBkDKsuydMEoAqe1Ze4G8tJ4konuBKyoAY9FolHwhz/jYmHAIJpNJNE1jZWWFBx88webmJlFZJuU4DCeTpBMJrw9Mkjhx+jSGbfOhf/2vGRgc/KktP280GoyOjlKv18lkMi/04oT6MVPwN/3ZKhKJ0Ov1xIAJXdf9oRARms0mkiSRyWTEa5kkSWLCar/6b3/51wF0D/r9gi7DTqcjOu/6vw+WKbjfZDIpIFTwehr8TuBO9XoZvWmcATRJp9NYliV6HH+aFMTFTdMUf+Obzeb3fE+TyWREB2ZwUSXoWQwutAQXinq9nojTG4axY1v2759+RaNRDMMAEH9nAyAUdBcGE2JVVSWdTotlDo6raDRKr9cT7uDAOZ3JZEQ/ZnA89d9vq9UCEINJWq3WjuMteOwfd4Wv66FChQoV6qdRP5Bj6qfZJfWjWrdgWlm73eb8+Qs88cQTdLsdkCRi0SgDAwO85CUvEW/i47E4jWaDWq0m3vgHb5zFNDb/sxbRUAyFTCbjQSTXKzFvtzvs3btXXCHsn4DXbHrOJsdxeMlLXkIulxNXBo8ePcrQ0BCRSATHcajX66yvr7O8vMLm5iadThfXdZBl7/4ikQiKrCArXv+TZVtEIhFisRiWZeLVBDm4jovrOqRSKTKZLGtrq1x55ZVMTk7yute9jrm5OQzDIJ/Pk0wmRfF5t9tlYWEB8ErPx8bGRLm34zi0Wi1OnjyJaZrs3bsXXdc5c+YMnU6HRCJBPB5neHiY
gwcPinJzSZLY3NzkW9+6j1OPnGJtbY16o4HRM+iZ3pvWX/zFX6DRaPDwww/z4he/GNu2SSQSLCws8Mgjj3DjjTcyODgobP96VycaiYLkvUEMTpoikQi2baMoDsViUcDDIMpnmiZf+9rXePvb3y5KxfsnD3q3VUTBu2ma3nL2eiiK4oGluEw2lxWAZ35+HsMwuOKKK0R5frVa5dKlS+KkIpfLoaqq2M+B46nb7ZLJZNi1a5dwUa2trQlXU7vVxop58cRGo8Hm5iapVIpz584hSRJjY2Ok02m2tra47777mJ2dFUDKNPscUq7jlZn3Aan+Kin8T674ahtKBTCqWCgyNj7G2OgohUKBRCIhiuk1TUVVNfbt28fQ0BAnH36Y1dkZCrZNJpEgFo/T6/U4feoUy7bNiB/1+WmFUqFC/agVnOAHXYgAuq6Lr13X3eFg+m7qv/3lXweP0f9Y/fG64G/U5csU/G4AHZ5puYPXNOBp4OGn2SXTv96B2+j7qdFo7Pi+fz/3Ryj7L44E6t+WzwSlAAGlgKfBwMu/tyyLarX6Xe+j/7EvP176H//y++2PF/Yfb8/kJAsVKlSoUKFCPT96Tqby/bjquQJrwVVar2vB6++Jx2Osb6zTbrdQVJVEPE4ikeDYsWPC0aSqKolkAheXZrMp4k6RSESAiqCcNIgejY2OgYSw0oNLLBYlHo+Tz+dZXVmlXq9jmSaRaJRUKsXw8DDxeFx0CklIWIMenNja2mJtbV10gXR1HVmSkWUPkAVXXGVFFi4WRVGQALNnoqiK7+pS8Eepie0cj8fZ2PD6I1KpFIuLi2xsbNBut2k0GgwPD/tuJoter0e1WqVSqRCNRgEYGRkRV043Nzc5c+aMACkbGxvMzs4C3lX1dDrNnj172LdvH4VCQfQjnTr1CPf5k+JqtZrnUPPL1B3b5ujRo9xwww3iaqlhGAwMDHgOs8cfo1Qq8aIXvYjZ2VkBoJKppChwDaBfcBzIskwqlaZQKAi3WwCmFEXhmmuuEfs3iKQ1m002tzbpdrqi8ytwUsXjcQDyuTzr6+vCURWPx4nH44yNjeG6Lk899RSra2uMjowwMDDAoUOHRKQmAH+tVotOp0O328VxHBEnDJxNuq57bjokdENnZWWFyfwkrVYL13UplUo8+eST/MM//ANXXHEFs7NzPsjyOsl6pkm73fY7pGycyyfs+U+/HfG9HXDK742SZWKxKMVikbGxMUZHRsjlcsRiMVRVFW4zVVPRVP9rVUFRVAYHB/j5X/h5pqcvcfqxx2gaOg3D4L6TJ5lfX2e4VKKEN0EyVKhQoUKFChUqVKhQoUK98PqpB1PfD0b9sLAqsLM3m02qfswtHo+TSiXRNA+2NBsNEcmTZZnJyUmGh4dF/Kq/c0dRvF0RFKH3l0z3F4H3zJ5wUMXjcer1uujkCKzsQ8NDwn6/7QpxcRzvCvXG5ibTF6epVCriKmHg0opGo8iSLLqPgsJzRZZRVQ1FkXEB27JQVc+1FVy9dMX29IBEKpVC13WKxSKdTof5+XnAu1rZbnfIZrPCSeS5bEwGBwdRVZVCocDIyAjtVpsLFy4wNzfH1NQUmUyGubk5VldXiUajJBIJisUiBw8eZGJignQ67cf9lvna17/O9MVpDMPwIFKzieFH2GzLIp/P88Y3vhFFVrxGbSRSqRSdToeLFy/SbDT5uZ/7OdrtNmtra+zdu1e4jiRJolKpCpdbAO28kvPCjoLzwAUXiUQ4cOAAsVjM2y8SyJJMLBbzIJ+ioKkaY2Nj5HI5dF2nVqvR7XYxLZN9+/YJABqMcC+VSkSjUUzTZKtcFk6iXq8ngJRhGKytrWEYhtc95riomgcLbdsWXSGDg4PieDR6BplMhoGBATKZDPPz86ysrHD69FMMDAywvr5Bu92i0+nS7XZwHJeBwQGisRjNRnPbJRXASu9Js/0EkqQdw/c8GOUVHI+PjTEyMkImkyEajYoOruBDURUfSCkoqoqqbP88mMx37Ngx9uzZzRNPPMF3vv0dFisVYskkercL/u+GChUqVKhQoUKFChUqVKgXXj81YCqAN8913DDogKpUKsLynkgmyeXyvivGczaZpilcKCsrK1x55ZXs3r1blJKLaXs+OJIk2NjY4OzZs2KizdGjR3nVq161Y/10XRcThBzHEZ1CIyMjRKNR4czxltWl1zO8iXmdLuvr62J6nPeYEqqmocgysqJg6AZRTUWRZRy/QFpVFS/2JHvxPcePmymyjCTJKIpMNBZFVRUMQ/fjV95tDh86TCqVJpPJsLCwINa13W4TjUbIZrNIkiSASSaT4V3vepdwxUiSxEMPP0S1WuXAgQPIsszZs2dpNBokk0mSySRjY2McOnSIwcFB4vE4uq7z8MMnue/++6nX6l7fhd6l2fAmEJqm14WlKCq33HILw8PD4piJRDRyuRwXLlxgY2ODPXv2MDAwwBNPPEGhUCAeT6CpKpIssba2RqfT3rEPVVWlWCwSj8fJZDLEYjHx74H7rNfrkU6nBbAKnHMjIyOUy2XRYxW4sILbqqpKrVYTriJd10V8JoBJ42NjSJLE1tZWX++XJaYF7niOWLC5uYllWWQyGRKJBKbpwbFIJMLQ0BDxeJypqSna7Tarq6uYpsmBA1dQr9dZXFwUQFHXvVikVlUZHR1DUzW2ylt+dG/bKSXkby9ZlojGYpSKRcbHJxgZGSaTyYjnkSx7kMkDTuqOz5qqesXw/lRED04pAgbKskwul+flr3gFg4NDRKJRTp8+jW1bFCanxHMkVKhQoUKFChUqVKhQoUK9sPqJB1P9IOq5jOpZlkWtXmfTj6JFIhFSqZSAQarqTU8LTvx1vUutVmNzc5NIJML4+LhwpASl4cH0O8dxiEQiouMplUpx6NAhdu3aJe4vgB+maRKLxSgUisRiUQE4AldW8AGg6x3+4R++xfy819ukKF6sK5PN4tgOkWjEX24F27KQJckzsUgSUU3zIIwkec4u20HTVLR4XMTPJFlC9h0qtl9067geJEmn07z02peSyaS94na/dNYwDBqNBqlUing8juN4zjNJkkgkEt56uojOJ9d1OXDgAL1ejzNnzmCapoju7d27l/3791MsFFFUhdW1Ne6+627Onb+AZZpYtkXP6PkRS2/6nmmaom/r2muv9d1qir9NCywtLbGxsUE8HqdYLAqXVzwep9vtICeT2KYtAF//tk+lUgwNDZHP52m1Wsg+zAvikKqqMnhZ4XYQH+x0OkQiETRNI5FIYNs23W6XTqcj3FcJv8Tb7myXiQcT+rrdLoZheJE6/2fBtCKAVCq1vbyOdzybpin6tHK5nDjOZN+9pCgqAwMltra2hLtP0zQGBwdZX1+n0+kIh1wum+VFV12FbVkkkwkkWWJjfeNpUEqSZWLRKKVSiYmJCUZHR8WUQ++xvShf0GWmBHCqD1AFsb1t95QqfkeWFXH8dzptZmfnOHf+PKl0muNXX+0dTwcPhlP5QoUKFSpUqFChQoUKFerHRD9RYOr5Ll93XZdOt8vmxgYbGxsCCqXTaR9IRfziZVWc6AZ9SPP
z86ytrQEQi8V29CkFk2XW19eJRCLipHzXrl2MjY0JWKSqKouLi54bZnzcm9IWiQiA0Q+lgk0TgCnXdTl79hxLy8viZ6qqEolGiEWjmJZFPBYTRem6baP6MErxl8exbSzbRlNVEamSZBnLBxoBENG7uj9JzsS2vJ9nszmGhoZEt1TwOJZlUa/XyWazvqPIod1u0+l0iEajOI7D6uoq586dY3BwUEwWO3/+PIqikM1myeVyHD50mKldXrTPcRxOPfIId/393WxtbfmT8SyMXo9mo0Gz2UTXuxi9HrZlMzI6wu233y4eb2hoiE6nw+rqKhsbm5imyfDwsBghnclkqNfrYuperVYT8cUAEAUgq9Fo8OijjxKNRnnd614nesKCY6R/WiJ4EHBxcZH19XU0TWN4eJhUMsXGxia63kWSJNbX18nlcmKCUXC7oDgdoGf0RKmvaZreMeF4Ux01TRPHSK+3PbHKNE2ifiH/0NAQ0WhUAC7LsjEMwytBb7dRVRXT9NxX6bQ3YbDVaqFqKqqmEYvFyWayTE5OcuHCeRRFxTItyuWymL5VGhhg165djI+Pkc1kd7iWvGijhOyDseDDA06ee0pVAgC1DaJUpR9KycJNtrKywszsrF/i742tDxSLx8Li81ChQoUKFSpUqFChQoX6MdFPDJh6PqGUbduit2lrqwy4xOMJH0jFiEYjAkptAxebWq3K5uYWzWYDXdeFQygWi5HP50XkSpIkCoUCCwsLrK6uUiwWkZBIpzM06nVs2xHOlWBkcrAN+p1R/ZBDUbyvZUkGJMqVMtOXLqEqKq7iek4TTSUWjfoTyWS0SMSP5EkCQEmyjONDJ01VSSSSXjzR70OSJAnDHwnebrcxDAPTMr3JPYYHNUyzh6apxGIxGo0G9XpduICCovixsTEB0Gzb9if7WczMzDA/P8/4+DhDQ0Osra0xNzdHJBIhmUwyNDTE4cOHmZiYIJFI0G63+Zu/+VvuvfdeOn6pt+O7hbq6TsefEheAtEgkwlvf8lZKxRK24zm1NjY2WFxc9EeXd8mkMwKi2bYtYGKlUhGTDvsLz4Nj4Pz582JU+cGDB0XHUyQS2QFOtmGity8bjQbVapVUKiWK5hcW5nEch6NHj5JMJv1C/bgALKqq0ul0xEQh0/J6yYKC9k67QzyRIJ/P02w2RfwxcKYFy9zpdP3703aMEg/WW1EU0um07+xaFw66UqlEuVz2RsI7DrrhTUis12v+OHCdkZER0pk0U5NT7Nq1i1wuJzrTvA6qbaDqdU1JngtPkpFkyXNNyduRPlmWkZXLgZUspkYavR7lrTKrq6tUKlVa7RaWuR1jxPUeIx4LwVSoUKFChQoVKlSoUKFC/bjoxxZMPd/uqOCkfmtri8XFJWq1KpIsE4/FicViRKNRotHArRRB9buYvDHVbSqVMs1mE9O0BDxIJpNYlkUsFhPl0v09WJFIRMTSACzbQpZz1Ot1ATH27NnjuZf6xhgHJ9XB7wT/JsuyX1bu8NTppzyopalISF5JtN8XFbinVFVFwtvWsWgMx3WwTK/UPJFICCdXf8SrZ3oQqlav02q2MIwehq7T6bTpdru+I8cmEvGKyU+dOsVDDz3EG97wBrFdWq2WmFoXACPTNHnqqacol8vs3r2bgYEBZmdnWVlZ8UvlU4yPj3PkyBFGR0eJRCJ0u12Wl5dZW1+jXC7T6XZ8MOVgO57jp2f0cFwHWZJQFJnXvOZmrr76uLe9e54DKOpPL+x2u54rLRrZMaXPcbztUq/XGRgYoFarCagWRPiCD6/HyqRYLO6IaUpIAiL194AFTrsANrZaLTRNQ9d1Wq0Wtm2Tz+dFlC7YFwHkDLappmlUKhVyuRyapvn9Z64owQ+m+0UiEZrNJslk0j+WPNdXs9nAtm1mZ2exbZtUKsXg4JDoAZNl2Y84JrAsi0KhQCwWF2CuZ5o0mw3OnWuiaSqZTJY9e/awd+9eSiUPAjqi7N9FduXLOqi8JnRvKp8kIGj/h+z/fKejSsayTBqNhgej/MmDtm2Jx8N1RfG6JEti6mOoUKFChQoVKlSoUKFChXrh9WMFpp5vGAUeELAsi/n5eRYWFmi2Wmiq5/bxYnMekIr6TqN+qNDpdKhWq7TbHSzLxLYdccLsOC6ZTJZutyticNsr6n2KxWI8+eST9Ho9xsfHyWQyNBoNslnvdkE/UT+M6F/u7eJ0SQCCWCzGysoqK6urPnjanhbnuXG8CXSKIiOB6A3q+TG3eCJOLBr1AYHs9065onS712pRrzeoVb1pcbrepd3p0OybeAcSBw5cQaPRYHxsnJHbRoRTZtuFkxGgpdVqcerUKQzDYPfu3QwODnLmzBkqlQqpVIp0Os2ePXs4fPgwQ0NDKIpCuVxmaWmJTqfDq2+4gcmJSe666y6mpy/6UTZwHNuDbMkMhUKBl117LTfffLNwbVmWRalUQtd14QTKZrNe5NF3ORlGD2+2nCsmANZqNX8vSCSTCRGVU/3IYzqdJp/PC/AWi8U4/dRp7rnnHnq9HocOHeK2224TwCUej1MqlUgmk3Q6HXRdZ3R0VERG6/U67XabdrstooFBxM62bXHsGIaBLCt0Oh0KhQKrq6vMzs4SiUQEXB0bG2N0dJRisUg6nWZlZUX0TZ0+/RRHjx7hyJEjxGIx2u02AwMDvrOq40O4JIZhkEgkSCTigBcvDY4/D+BGyWQyFAp5VL+cXLLBRkKSbCTJg1Ou6+CKbvS+eGPf18GXOwGVLCCr47hsbZVpd9r0TC9qafvQznEcb8/19a/LshyCqVChQoUKFSpUqFChQoX6MdILDqaeTxgVRJSEy6PXY2FhgdnZWZ588kkP7ESjRKMxItEI0UgfkIpERBm4rhvCIeU5WbwCaO/EWfajdy7pdIpqNSI6iIRbynepqKrKwYMHURSFdrtNMpkkl8vRarVIp9PUarUdBduO4/hRvW0Y5bqumMyXyWRQFIVvf+cB0UeF67lEgil3iqwg+9DJixV6y2vZNkl/XQPo1R8ZBOjqOo1Gg3LZAwGdTodWq0WjUafTaSNJEpFIlCNHj/DqV7+a2dlZsX+DKXWrq6v+csqiBP7b3/42iqIIp9Sjjz4qtkEmk+HAgQMcPHiQgYEBXNdlcXGR+fl5TNPENE0MwyCZTPDmN9/O+fMX2NzcpFQqMTg4QKFQEFPnIpGIV/oeieyIVjYaXvSyHyAG8b9+4OQ4DhsbG76zqU2z2RD9YbFojOJAkU6nA0A8kUCWZWzbJhqNcu7cOebm5uj1vC6o173udcLFNDQ0hOu6osfKtCxSySSNRoNazYvGqarKrl27/MJ4R/RdJZNJer2e+D4Wi7G5uUkymWRqagrLsjhz5gy9nsmBAweoVqusra2xtbUlJhAmkylc16FSKTM7O8vq6iqxWIypqSkmJycFFFVVVUDTwCkGgdvLi9YFfU+xWAxV1cRx5LqKb1xy8eqkHFw3AFCuOE76nyOXT/STJJlIJEI8HkPTIiBBzy/Wdx0Xx3HFPnUcB8fv2XL7yFTQuRUqVKhQoUKFChUqVKhQoX489IKBqecLSA
WTy3q9Hq4LsVhUdBktLCyytLQk4lYRH0RtR/eiopxcURQsyytzDrqGttfBnyYmSYDjQyRXOKxardZ2IbUPxUzTFI8XQKdWq0UqlWJgYIBOp8PAwADlcpler7cNoqTt7ZbwO4QymYwASnNzcywtLXquH00DSUJCQlU9t1Q/bNI0DdkvNI9GIiiqIrqHgs9e1g9sx6bVbLK5tUWz2aTb6dJsNUW5uGWaaJrGi150Fe9//x2i6F1VVeLxuNfbJEvUajXRNXTu3DkeffRR4vE4ExMT5PN5Hn74YQzDIJPJkMvlOHLkCFdccQXFYpFer8eFCxdYWVnB7Jn0zB7dbpdGo0G32yUSiXDw4AGuv/5VxKIxXFxR9B1MyOuZPbSI5gOZJLIsY5peP1Mul8M0TW+ZY3FMPyIWTNgzTRO9q2PZXtxxfGycSrVCo9FgYO8A+XyeSCRCp9MRrjPTNEkkEjQaDXq9HpblxQcDt1jwOXC0xWIxWltbTPrbwwM8KnNzc8TjcbFNM5mMAEa9Xk/AtkgkAoCmRWg0Guzdu5exsTHAc4RdujSNYfSwbYd2p4NtO3zta1+lXC5TKBSZm5sXLqd3v/vdfhzRRdf1HZMoO50O+XxegNNtN1MAqLb71zww5eC4EpLjwSjX/89xHQ8e4fqxUv9+kMWxriiKD6TiPqRTkCQvnRdMWfSeWwGMcrZ/dtlYwGByX6hQoUKFChUqVKhQoUKF+vHQ8wqmnk8YZZom7XbbLyGPkE6ncBzHB1IL6LqBLEtiql4sFicajRCLxYnHYwJUBcXj7XabSqUiIIgX25P8aWvSjqid5U+mcxyHfD7P2toaFy9eZHx8XJysB4AqgFTBcvd6PZLJJNlslmazKUqmDcMQkb5IJMLExATpdHrHtDfXdTn91FM4juudwPuxPQBV8ZdTlpEVxSs996N6kiQJ15SE11kUdBcFYMG2barVKrVaja7e9dxSzSatVgtD15FlhauuOs77f+X9JHx3WDQSRZIlisUikUiEXq9HrVZj9+7dzM3N8fDDD5PL5ZiamiKZTHLixAkcxyGTyVAsFjl27BhXXHEFuVyOZrPJmTNn2NjY2C7PBtGzZOgGpg/HAndUsE2D6XrBttN1Hdu2vX4n18W2HYaHh3FsB13XUVWVWr2GLqKJEI/H6fV6GD1DRP10QyefzzM5OSmKzoP4niJ7/VKWZZGIJ3jHO97Brbfeiut6kcB4PC6eD8Gkx3a7TTqdFj1RQZG5aZqMjo6iKIooLldVFcuyhHsp6DbrdruoquYfwxpra2uixF3TNKamdvn9Yh5wGiiVGBsb5fz583zlK3/PQw89iOu6FAoF/tE/+kcgSbiuTafTIZFIkEwmcRwHwzBIpVJi6l4AZ/u7n3YU9Es73VGu4/ouJ2fb1eQfb5IsIbnBcek5nPr7zoLKqH4H5PZ9OuJ3trulXBEJ9ADl9oS+UKFCfW8FPXU/qGzbFg7SUM+/NE0jFov9ULdttVovSLVCqFChQoUKFepnV88bmHo+3uQ4jkOn06FWqwnAUygUAJifX2B+fp5ut4skgaIqaD5g8GCGV3Iej8d3dElZlkXH71DSdZ1ez4uPBV1JgP97No5j+2XeDrZtCQiRzWa5cOECu3btYu/eveI2QfQqcFMFheSdTodUKiXg1MDAABsbG+i6LmBXNpt92nS+arXK7OwcEU3zJ5gpovTZc335vx+UR0sSjusiOY4HqxTPVRSAD1mWPVeL7dBsNtnaKtPtdNG7Oq2WB6WCfqZXvOIVvO997xV9Q8H+LuQK5HI5tra2cByHarWKqqosLS1RLBbZu3cviqJw4sQJJEkik8kwODjI8ePH2b9/P+l0ml6vx9bmFvV6QwAzwHd9Rchms6TT6R1xs2KxSLlcxjQtFEUmHo+TTqeRZYmVlVV/Ap83XTAej2EYhoCOxWIRVVVpt9uik6jVavkl4U0GBweRZZmBgQFSqRS9Xo9Op8PWVhnD0D2Xk6YK146qqeQTeQGWIpEIhm54k+h88JdMJDly+Agurv9YLrGY57oK+rCC/inTNLEtm57ZQ9d1XNcll8tTLBZIJpO4rku73UZRFOE0q1ardDpdv+vLodk0cBybXs+k1Wqyd+9ePvjBD/DVr36VU6dOUSgUGBoaQpYkLB+YBg4u2/amGcZiMREV3Y5+9kdA6XPducJJ6Lrbxe1e5M4vKKd/suR215QAT3iQyRXRPw9GCdgVfBZfe/ciXnkkkCXJL4YPFSrUs9HQ0BDFYvEHvl273eb8+fPPwRKFejZKp9Ps2rXrB76dbducOXNGXJQJFSpUqFChQoV6PvScg6nnA0hZlkWj0aBarWKaFplMmtHRUVRVZXFxkUszM3TaHRFRUxQZxYdSwdXgZDLlF55r/sk76LqBZXkgyrYd0SVlWRaGYQD4MEkjODn2Tri3p6UpikI+n2djY4OHHnqIbDZLqVRCVVXRa+Q5sGxveptfwv3kk6fJZjNMTEzQbrcZHBwUkcRY37h7cSLuujz55Gn0bhdV01BkWcT4ghPyIMrnnfzvLJP21kXDdRzh/rJsG9cvky6XyzQaDQzDQNe7dLtdDEPHdR1e8pKX8su//Ms+2HKRJM/ZlUqlmJyYpKt3BdSp1+t0Oh1GRkY4cOAAuq7z4IMPoqoqqVSa0dERjh8/zr59+0Q59/LyMnv37qVWr7G8bIhooyRJYlpi4IRRVY1sNsv09DSlUonh4SHhUmu32+Lxg36qwF2XTHql3gGQU1WVfD5Ps9n0nFL+vw0PD5PNZkn4HVLe9tBpdzo0Gg1i0SjFQhFN03ZAPoC7776bV77ylYyPj+PiCgATiUTIZDIAAij1el1WVyu0Wi0RZYvFYn7p+PaHpmnieLRtx1+fnnBcZTIZotEoyWSKra1NdF3HMAw0TWVpaY1arUYikWBjYwPHcfiFX/gFrrzyShHVkyRvmqCu6+Lx+vvNjJ5BPB7fAUiFSwqvFyqAUoFDKnh+9E8aDAr2ZVnCdSRcOTiu+15DhAHKBSkAT31hPb9Kyr3MKeU/C0Sh/8LCIgcPHhTPt1ChQn13zc/PMz8//0IvRqgfUJVKhUql8kIvRqhQoUKFChUq1LPScwamnmsgFUxWq9cblMueGyedyTA8PEwikWBtbY1z587TaNQJYkYBJNA0jUhEIxqNYvZ6RKNRUqkkgCjTDiBT8LVlWbiuI2JvHpAwkCRjR5wuiG+5riuAj6IoxONxarUap0+f5vrrrxfww7Is0um0iIEFccNTp07hui7Hjh3jmmuuQZIkxsfHWVtbo9Pp0Ol0RJQpKPA+d+4ciqqiyF4JNX4Pjzgp7wNSLh6sChT8G/70PADJdbFsm2arxebmFp1ux3MHdbsYPQPXheGREe6445eJxaLCSRN0dk1MTKBFNBrNBpqm8fjjjxOPxxkYGODYsWOUy2Uee+wxMclufHyca665hr179xKPx1lZWeHkyZPous7i4iK1Ws13BOF1ZvnOnKAPLOgGazabZDIZdF2n0+kIJ5Wu6+i6jqIoZLNZbMvC8cvjAXK5nIAV1WpVgLiIFiGZTIope
bZtU6lUxOOp/ja3LQtH00hnvIhlt9sVzjtJknjTm960A4aIom+g2+36jijDj5pJoox9oDSApmlEY1GSyaToE/Oinya9Xk9ETcvlCtVqRcQAVVUlnkgQjUQZGRmh1W7TbrWwbYepqSny+TyapnH+/AVs20LTNPbt28f4+ASJRAJAOMtisZg4pgOHXzC98OlASvIhZVDcv11Mbts2tuN9OI7nlnL92+DKyAFq8hKCffDI9R8bAaX8lvQ+PBV81R8fBCQJGQ/CVioV1tbWmJyc/F94BQoVKlSoUKFChQoVKlSoUD8KPSdg6rmEUq7r0u12WVhYYGVllXw+RzqdoVQqks1maTQaPPjgQ6yvr4tOpuAEXdNUv3A8RiwWE0XoATxot9uYpgegLMui1WrjOLbfG7QNnFRVJZlMoSiKf9JuI0mB08j2AY2MLCPKriORCK1Wi2q16k3X85fJtm3S6bTnuvEdPadPnxaOrFOnTrG+vs7LX/5yIpEIQ0NDrK9v0Gw2xbrpui5cV5oPyQRA8J0oQVu047jIMt5UPteb8uf6zpKgt8l1wXUcr6er02FpaZlavSacQ4ZhYPZ6uK7Dz/3cz7Fnzx663a7YP67rMj4+Lrqh6vU63/jGN9ja2mJoaIirr76axcVFTp8+LSJ2U1NTXH311ezZswdN0/j6179Ot9sV5daGYRCLxcjn88Tjcb/EWhaAJoBQzUYTRfVKxHtGj67pTW3r9TwX0dDgkJiKGMDEzc1NVtdWiWgRxsbGaLVaHrQ0TfL5PIODgyQSCXq9nljHTqcjCtVN06RarQISlUrF71jyYqDRqNez5Touc3Nz7Nq1C03ThGPIMAwRiTQtC9kHn4qiEI1GPAhWrWKaPdFzFTikgm0D+OviTa3bvXs3sVhclK9rmoqiqrRbLc91FY+j+51c6XRGPN7m5qbYH8PDQ0QiURELDOBe8HjBpEJNixBE8KTLQGfw/OuP7wXRveDroF9qp3NJugykBnDKa+Lvj4oKEBaUSBGAsX6nlP8vsnd/tu2wuLjExMRE6JgKFSpUqFChQoUKFSpUqBdYPzIw9VzAqMvv0zRNlpeXWVpawrIsCoU8w8Mjfv+Fy5kzZ5mZncEyLf9kVhYuqcAhFfRIBRO+Wq0WhmGwvr4uSqIbjYYPwHS/Fyjux/tcASMsy+tfUFXVX07vBLrd1gUE8WJKHlTp9baLrAPJsizcUoZh0Gw2UVWVq6++mvvvv19Mb1tcXOSJJ57gRS96EZVKhW63I07OHcehXm+QTqeIRCN9QED2WZQrzsw9qOBiOxKuZXnAwJ+Gtu1s8SKLtu0Bta2yFwfQdR2zZ9LtdsXjFwoFbrj+evL5PLlcTsCGWCxGsVjEMAxWV1e5++67abValEolXvrSl3LhwgXOnz8v+rf27NnDVVddxa5dXjH3pz/9aT760Y/ya7/2a+RyOQABBbe2tkQRbwBUggmHmqahaqookVdUhUwmIyBUAAPb7TbdbldMs3Mch1QyRSQSYXV1jUjEi6vt2rWLRCIh4n7B/ggmGgbAS9d1Go0GqqoyPj5OoVAQ+zoWi2GZXvl60BkWi8Vot9uiN8yDmBKaX/DtrZeKbVuicLzRaNBut0VvWlAI7k3bs0TMs9czxXHnFfTLWJZFs9kCXD+uGhFx0MAhOD4+TrFYFNvSc0t5rsRKpYLreGX9mqYhy15XlgdgdxqTtiXhut6AgMsdU0HxuTA59YOjoDhdkVFkRQA3aZutBq8O4jm0wxi1fXfez/0bSr5bytvWLrVaTQDjUKFChQoVKlSoUKFChQr1wul/CUw9V86oy+83ABIzMzO4rks6nfZdHcMkk0lWV9d46qmnqDfqnkvCnz6n+ifSkUiUWCwqgFQ8HkdVVSqVCjMzM+K+wQM0tVpd9CWpqlcWHYvFfAjVv2TeyW7gYhkYGBCAK+ieCgqmHccmk8nwyle+Etd1BVhwXZf19XUqlQqlUgld1zl69CjtdpuTJ08K99Hhw4ep1Wq0Wi1s2xa39YqvsxiGIeCL47r0s4JgUpnjuji2Ldw6lmVj2RY9o4dpmZg907ut3ytk+rCj2+3QM3rohk6706ZnGNi2TS6XY2JiQkQQgw6vTCaDbdvMz89z5513YpompVKJa6+9lieffJKZmRkxeXD//v0cO3ZMTLb77Gc/y3/7b/+NXC7H0NCwV1Tvu48syxJOmv74WBDnC4CRInsxxmCaYK1WExG+Wq3GyMgIiqKQTqcBD9gFbrBSqbijd6tarYrtHRwfAcwwTVN0NjWbTZLJJJIkkUxux0IzmQyVSoVUKsXAwICAM81mU0y0SyZTyLLiQ1RFPLZhGCK2F0xg7HQ6lMsVolEvXqiqqgfHDIOe76IKPgyjJ1x6ut5FUVRi8RixaFT0WmXSGWKxON2u18EWTPoLStdlWfaAWCopYoTepEuvw+ryCJ94/krgiDjrdnF531Onr/9JEi6zACSrqoqiKqITbsf94+K6ktjH/h1tA66AYgXLxvavuLhIPqwL3F+hQoUKFSpUqFChQoUKFeqF0w8Fpp4Pd1QgwzCYmZmhUqmQy+VEKXWpVMKyLB555BHm5+e9k8wAWsj9LqkY8bjnkkokEmKq2PT0NOvr66yurlEul7l06RKKovg9OgrxuBf18wrDFRGdkySZXs9AUVR/Kt92FCkWi9HtdtF1g2g04kcAt0FQ0IVUrVZJpVICUgwPDTMwMEA6nabb7VKpVLjyyisFiHrJS14iJgMG0CJwbmmaRq/Xo15viBiYZVk4rudM2YZRXhTRdmxM0xQgxjQtbP/n/aXUQX+VYRjeJD5dp9loUK/VaLWaOI7Dy172MvL5/I4C9gCkzM3N8cUvfhHHcQSUOnXqFAsLC6RSKXK5HAcOHODIkSOMj48jSRKf+cxn+MQnPkGlUuEtb3kL8XgMvetFHNPpNPF4XBwriUTC62CSFSTZc/6Uy2WazSadTodYLCZ+LxKJiL6nTCZDvV6n3W6TSCREqbmmaQL+BOtt2zaO7eCC6PICr6B8213mbTNF8Tqsgm6oAKQF5ff93WOBUykAZ+1Wi0w2g6p62w7Ji/4FgCsAn5oWIZlMUa1W2Nrq+tHVJs1mw58Y2cN2HGRJQlFUVFVB0yKMj4+TSiVRVc0Hfd6ERsuyANAUBU3zetOC7RJs6wBSxuNxNC2y47jz1vfy53HAhLx18CY7BpJ2TCIM4ncBUO3vgFM1VTjHgkmSwhq1oyzdL10Pgnu+S2rnz7zPfiMVsiKL4zRUqFChQoUKFSpUqFChQr2w+oHB1I8aSn23+3Ndl3q9zsWLF1EUhaEhb7ra4OAgqVSK9fV1HnvscRrNhnf6KUl+6XfQJRUlFvf6dIIT7Wg0SqfT4cknn/SdMA6tVlP0O2mahqZpHvgaGCAei4mTd0WRBUhIJOI+aLCxLBnXhWaziW3bvsOkRa+noSiqWBfbttja2uLuu++mVqvxnve8l1wu68WafLhVq9UIpvKZpskrXvEKdF1HVVXhrunv7gn6oTrtDtVqlWjUc/JsF0x7MMT0I169Xg+j18MyTSzb9qOLFpbl
gRjLj6vZjjdR0DRN2u02jXqdZrNJq9UU098mJye47bbbxPIGjqloNMqZM2e48847kSSJUmmAl770JZw4cYLV1TXS6TSFQoFDhw5xxRVXMDY2hqIo/Omf/imf+MQnkGWZt73tbRw/ftzrMYpFaS41mZiYEDAhADvtVptGsyG6qGq1mnAQjY2NIUuyDwp11tbWBABJp9Pk83kBKQNIV6/XBWCLRqLIiowW0RgdHSWRSKAoCo1Gwy8p16nX66yvr5PJZIjFvE6qQiFPo9GgUqmIqX8B0ApgX/9ytFotut0uW1tbomA9iCq6Lj4kDMrEbWRZolAo0G63qdVqABSLxR1OI9WPBAbHaBBpc/2JeK5joihR0bemKLLfmeW5tZLJpOguk2VvGxYLRQFbTdMS5e6Xu6WC5ZYlGXy3m4TX7STLXjwPBQGFJNkDyZ4rShb9Xz2jh9kzURQZVdWIRCNEo1H/mPcwkwvIsueQDBS8FsiS75wUji7v37b7u6IhmAoV6lkqHo9TKBT8KHn3+/5+4KhMJBIUCgWWlpaeh6UMdblUVWVwcFD0V/4oNTk5SaXiTY79QVUqlXAc5weaGlgoFKjX62KCcSKREH8Dv5+uvfZaAGZmZlAUhVar9UMt97NV8D4lmM4cKlSoUKFChfr++oHA1I8SSn2v+3Ich42NDebm5sjn80SjUSLRKEODg9i2zRNPPMn09LSYAhdEu3a6pOIkk4ltZ42i0O3qXLhwgY2NDTqdjugIKpVKvPKVr2RkZMR3zWiin6m/aLnT6TA3Nycm6AXOkHg8JqJB0WiURqNBLBYnkYgL8OE4DoqicPbsWWzb5hvfuIfXv/71AnZtx+sshoaGsEyLru65Ynq9Hvl8XrwJC5ZHVrweo7X1dUzTFNDDtm26XZ1Ot+PFunp+vMsysSwb0wdUhuGVphu6Qa9nYPsAxfIdVZ1uB72rY/QMLNPyp8VBPBbjLW95C+Pj4zv2o6ZpPPDAA9x7771i+t4111zDt7/9bTY3t8hk0hSLRY4cOcKePXsYGRnxe51WGR4e5p/8k38inGPBeqiqytTUFJZloXd1emaPcrlMrVYTvV+aqiH74DAWizEwMCAcbq7j0mx53V3FYlHcZ6vVolKpsLS0xPLyMsVikRtuuIFEIkE6ncY0TR5++GFuuOEGcrkc8Xgc27Kxhr2oW6PRoF6v4zgOo6OjAOi6wcDAAIuLC4BEsVhEkhBgSpYVNG37DasHNFt0Om3huOr1TNLplOiACgBTUAgeTJjMZDLE4wkajQaSBOl0mmQyKZxetm17k+hkfzoeroBb7Xab8lyFRqOBbVv+cyVJPJ4gl8sSi8XENMOgWD4ai4rpk72eIWKF21DKC8oFk/IURfYHBWjCDeW6HkhyXNX7bdf1jkfTpNPxjk/bd/c5/sS+wPUnAaqmkk6lKZVK5At5ov6Ew8vBWPC8lNguTg8cjfF4HMuyd0wYDBUq1HdXOp3mox/9qOix+53f+R06nY5wnAbO0FgsJi6k/P7v/z4f+chHSCaTXHvttXzuc59DURQikYh4bQkuBAXu0jBa+6OVLMv8i3/xL0R347/9t/+WxcVF4dbtf/0LLtoFfYyJREJ8H41GAc+5Gzhau90uH/rQh7jzzjt5/PHHabfb/nuhuHAcRyIRcRHNsiwxvAPgyiuvRNd1Tp48KeLxASgK3M6AGAITi8X4zGc+wyc/+Um++MUv8tu//duMjY3xq7/6q2IybKfTQdM0cTFI0zSxHG9961v54z/+Yz7zmc8wNzdHLpfjox/9KCdOnBDLZdu2uE1wP8GQleACZXBBBjxYGwxmCeL9mqbRbre5+eabGR0d5TOf+Yy4bahQoUKFChXqe+s5mcr3vfT94JZt26yurrK+vs7o6Kjo4CkWCzQaDR555FE2NjbECacsSztdUrGYXxidIJFIIkmwsLBAo9EgGo3iOI6IVgUOrI2NDbrdLhcvXuybupckl8v5bpg4iiKTTKYYHR1leXlZvAGXJBnLMrFti0ajgSzL4k170I0UvMEJnD6NRoMHHniAqakpjhw5Ipw0wcfDDz/M2NgYo6Oj2LZNPB4X0wibzaaAZbIsU6vVRFm77Tg4fndUu9P2gJNpYhg6ug+fDN3wAYiB4fdIiV4iyxQl15lMhsOHDjE2NsbW1haPPvooqqoyOjbGS178Ym677TYBKgKX0v33389DDz0k+r+OHTvGfffdR61WI5NJMzAwwJVXXsnExARDQ8MCSm1sbDA0NMTAwICADMH6AWSzWarVKpubm7RaLQFMFEURLqP+N8H1el0U2cuyTL1eJ5FI+BPqtB1dUGfOnGF1dZVUKiUgqKZp3HnnnZTLZV7+8pfTarXEhL1gWl9Q3F4sFsX+CDqngtJ87/hU+iJpkihm98CUQzKZIJ1O0et5fVC1Wo1ut0MmkyGdzqBpXiRvW94xH2z3WCxGo9lgbW2NwcFBstkczWZD9HF5v4+IiAbbJpvNcvDgASqVCuvr60xOTjI8POLHVBVWVlYpFPIeuDQM4vGE2B89wzu5CE4atvdZAILwTzhtvyxd9uGxg21bGL2eBzwNA9Pyo6S+6862bW9qn90/xc+PozoOW5tbzMzMkkgmmJqcZGxsrG8iX7CFgn4pBLSOxWLkslkkSULXdRKJxNOAVqhQoZ6ufD7PyMgI/8f/8X+wsLBAt9vlPe95D9deey3dbpff+73f481vfjPXXXcd5XKZz33uc7zhDW/ANE2+8IUvMDk5yate9Sr+0T/6R8iyzIkTJ/jzP/9zfv/3f59Op8Pw8DD//t//ex5//PEXelV/qqSqKsePH+f/+//+Px555BE2Njb43d/9Xf70T/+UPXv2MDw8jCRJvOpVr0JRFKanp/mjP/ojfuu3fovh4WGmp6f59Kc/zX/8j/+RdrvN//P//D986EMfQlVV/vzP/5xIJMIHP/hBstksH/3oRymXy/xv/9v/Rjqd5vd///e55ZZbuOKKK7jnnnvYv38/k5OTgDdpOBjm8eEPf5jDhw8zODjIf/2v/5XNzU3+2T/7Z6yurjIxMcGHP/xh4SRXVZU77rgD27a56aabWFhYIJfL8Tu/8zsMDQ3xuc99jvX1dV796lcLAPdXf/VXvOxlL2NmZka8f/iN3/gNbrrpJl7/+teTTqd505vehCRJfOQjH+FDH/oQv/u7v8tv/MZv8M1vfpNf//Vfp9lsksvlWFlZYe/evfz2b/82o6OjvPOd7wTgP/7H/8g73vEOxsfHGR0d5SMf+Qi33HILL37xi5menubb3/72C3kYhAoVKlSoUD8xel7A1LN1Wpmmyfr6Oq1Wi7GxMRzHIZfLkUqlWFlZ4ZFHHvGvzHmTuIKTfk3TRLl5Mpn03R/elbtLly6xsbGJ49h0Ol3W19fEFcFcLiciXKdPn2Z1dZV2u4OmqWSzWdJpz+Gza9cuisUium7QaNSFfT2RSFKv1+gfYx8UsyeTSRRFIR6Pi5Ph4MpyIuE5XR568CE0TWP37t04fgeUZVmUSiUxZS7oltI0jWKxKEq4g86iWCzO4OAgzVaTZqPpRe7
abQyjh657MbZOu0On0/FBgClgVM/0roBrqkqpNMCePXs4dOgQhw8fZmpqikwm7TvNuiwvL6OqKrlcjnQ6LSBfrVZjc3OTRx55hKXFJTLZDCMjIxw6dIhvfeteWq0mmUyGoaEhjh07xsjICKVSiWg0wtraGhsbGwI0BVAw6DHSdZ1Ox4splstlARKDvilFUQTQabfbdDod0um0sM9bljfVrlwuA4iC9KCkfm1tjVqthqZplMtlGo0GhUJBXDF99atfLa4WB8634Kqut/8TDA8Pi2jA6uoqpmmKuFgi4Tn2AqDjOA6maQlY2e9IKpVKxGIxVlfXfPebg2manlsrFvdjaUG/2HbpuyzLDJQGyGYybGxseJMUt7ZotVq+I8mi0+liGDrxeJzJySlSKe/YDMrNDxw4IADtwECJ9fV1NjbWSaWSrK2t+cd1SjxHbcem2Ww+7UrwNiz2OpwkydwRL9X1Lp1OxyvWNy3PIWXZWLa9Xcrvw9Vg2ziu60UQxUQ/D151uh02NzeZnp5mbHyMWDTqAS3HwXEdr4fKh5eRSIR8oYBt27SaTQCxPqFChfreWlpa4r/+1//KBz/4QTKZDP/n//l/8p73vIe/+Iu/4LWvfS0vfvGLqVQqnDt3jltvvZVPfepTPP744/zn//yffeA9TDqdZnZ2lk9+8pN85CMfYXl5mdXVVf7gD/6Aj3/84/4U0FA/SvV6PX7v936Pd7/73XzgAx/g//q//i/GxsaIRqNkMhmKxSKyLPPUU0/xqU99ik996lPcfPPNXHfddXz2s5/lHe94B/fccw8HDhzgfe97H29+85u56667+OpXv4okSfzCL/wCn/3sZ1FVlZtvvpm/+Iu/YHp6mmPHjnHjjTcyOjrK+fPnOXnyJG9729u44447eNvb3ib+ZgYxw2BAymte8xpM0+Szn/0sDzzwAH/+538u3u+AN4jkr/7qr/gX/+Jf8Ad/8AfCcT43N0e32+XNb34zH/zgB/mlX/olrrvuOt773veiaRqvf/3r+cM//EMARkZG+IM/+ANGRkb4+Mc/jmVZnD9/nte85jWMjIzgui7XX389hw8f5k/+5E/I5XL8s3/2z/jYxz7GX/7lX3LjjTfy4he/mNe//vU88sgjjIyMcNtttzE8PMznPvc5isUi119/Pffddx8LCws88MADL9TuDxUqVKhQoX7i9JyCqR8k+tfr9VhaWkLXDUZGhrEsi2KxSCQS4fz5C5w+/aR/Inx5dC9CPB4jkUiS8ieHRaMxdN1zQFUqFVygXq9TLpf9wm+vKHtpaYler4ckSRw9epTjx6+mXq8RiURRVQ/IVKtVHn30UZrNpojkmX4XU+CIGRsbY2CgxK5du4RbKujusW1bTISTJEnEHxzHYWtri3K5TD6fJ5FIbEew8N6EBSXetmXTtJtEtAiHDh0iGo1gWbaIVgQF5esb6ywsLDA9fYnHn3hCAJt2qyXWu+dH+FwXxsZGufbal3HDDdezf/9+UqmU3+Ok0Om0OXHiBIVCgSuuuII9e/ZgWZ4rbHp6mnK5jGVZRCIRZmdnva6lbJbx8TH27dvHvffeS7frRRFHRka48tgxBkolCoUCsViMjY0NNjc3hfPHtm06nQ5bW1tUKhU6nY4oDi8UCoyOjoptGMQ+4vE43W6XeDwu+pwikYjooQiOP1VVqdVqpFIput0u0xenqVQrDA4OksvlxPQ8WZbZ2NgglUpx4403kkqlhAsruMIbROwCp5ppmgwMDJBMJkVnlWEYRKPRHV1PwX1MT08zNjaGbdusra1hGAapVAqQ/FL4LNVqze+x6tJsthgaGiKXy4npdJIseyXesix6oSIRjUQiwcrKCoZhsLVVJpHwCsw9N6HXXbW+vsb8vB/dtB0URWZjc5MLFy5g2zajo69hdnbOczYZXmwylfKihY7t0O60hdMt2DaB82jnlMSYAIyNRoNWq+0DKVM8f4K4aKPuxSKbzQadThfLMkGSiGgR4vE4qXSKZDKFpmq47k5ItbGxQaVSYXRslFQy5U+TtERhfCQSIZ5IUC5vocgq4CIrCul05kf3Qhcq1E+xBgYGKJVKfOxjH+Nf/at/RalUotVq0Wg0uPPOO1lfX+fXfu3X+Hf/7t9x0003ideA0dHRHXGxYJiH4zjMzMzwrne9i/e9730cOHDgOZvw+7OsSCTCVVddxac//Wluu+02jh07hmEYHDt2jIMHD9JoNACYmppi165dRKNRyuUypmmyubnJn/3Zn1GtVpmfn2d2dpZyuczevXvZvXu3eC9UrVa9SbiKwgc/+EHuv/9+MXDEsiwefvhhNjc3sSyLO+64g2uvvZYzZ86IZQy6NS3LQpZlTp8+zRvf+Eb27NlDoVB42nFx991389hjjyFJEq9//et53etex8DAACdPnmTXrl2+0zhNr9djZGTEj9Yvsra2Rj6fp1wu8z/+x/8QF0D/7M/+jD/6oz/i6NGjqKrKF77wBf79v//3/OVf/iWdTke41IPofr1eR1EUf9KtzsMPP8xTTz3FBz7wAdFxqareIJHBwUGSySRN/2JIqFChQoUKFep76zkBUz/om8xeryeuehWLRUCiVCohSRKPP/EE58+fB9dFkuS+6J5GNBolkYiTTKZIpZLiDZFt28zNzYlizfLWFrVaTcAZ2/a6LGzbptVqs7Kywte//nXhdgIJ0+yJOFnQS5RMJimXy6iqytDQEKurq35HT4yxsTE0TUOSJLLZLJqmiil+iUSCVqvJ/fffz759+5iammL37t0cPHiIXbum0HVdFHoGrinTNOl2upiWSSadIZvLMjMzgyRLwlEVi8VIpVLe5DPXJZfPMTo6ypVXXkmv1+Mb3/gGjWaDVtMr2O71vC6El770pbzqVddz5Mhhcrkcg4ODolPBK7vucPHiRa688kpSqRSWZbGxscH8/DypVIpCoUCxWMQwDC5cuEClUiGdzjA5OcHk5CT33nuvcPuMj49z5MhR8vk82WyWeDxOtVplY2ODWrVGtValWq36U+e84vnx8XFisZgoAQ8iXoHjDDzYdOLECT7zmc/woQ99iGKxiGmaAKKvIuhyGhsb4+zZs7TbbS92aPYoFoskk0kmJyfJ5XKi22ppaQnHcchksjQaDYrFonDuJZNJVldXvc4mP5a5sbGBYRj+cYM4xpLJJK7rsrK8Qr1RF71XtuOwublFp9NhfWODRDxBvd6gVqujqoqIDAZvblutJpZl0u16kZd4PI4ky165t3BNeX1nqu986/V67Nu3j42NDU6fPg1IYsoeSNi2Rber02oF/VYKsVicF7/4GlZXV6jVqhSLJSqVMrquMzAwCIBuGGxtbZFKpdja2vLu7RnAVACLLdtiddWDbx4oMun1TFqtJhsbm2xsbLC2tkqr1RIdaUF8Lyj5D47JeDxOsVhidHSUdCYjXmdcv+NjbnaefD5PJOJ1hAQvQbKiMD83j+3YDA8NARLRSIRkMvG016ow2hcq1NMVTIN93/vex913383Jkyf5vd/7Pd7whjewurrK/Pw8d955JzfccAP33HMPlUqFz33uc9x888
387d/+Lffddx9zc3O02210Xeeuu+5ienqav/iLvyAej7O+vi5eT0L96GRZFuvr67z97W+nXq/zV3/1Vzz44IO85z3vYX5+ntOnT/OiF70Ix3F497vfzX//7/+dEydO8Md//Me8/OUv5+TJk9Trde666y5s2+bP//zPueOOO3j3u9/Nn/3Zn/Gd73yHtbU18bd4fX2dn//5n2dzc5MzZ84wPz/PysoKnU6HT37yk4yPj1OpVJifn2d+fh7DMGg0GqysrOA4Dt/5znd46KGHhFt8aWmJTqcj1uWuu+6i2+1y/vx5hoaG+MY3vsG9997Lvn37GB4e5utf/zr79+/nT//0T9na2uLKK6/k2muv5b/8l/8CgK7r/M3f/A0XL14Ubt/PfOYzvOpVr+KJJ55geXlZXND4yle+Qq/X46677sKyLL7xjW9Qq9U4ffo0uq7zrW99i/e+972kUim++c1vct9997G5uUm32+Whhx7iscce4xWveAV79+7lsccee6EOgVChQoUKFeonSpL7LChSo9Egm82ytLREJvPdnQY/zFVPy7JEt1NQhp1KpbBtm1OnTjE7N+dPgA/GyStomgdlkknPJRU4OlRVxXVdNjc3OXv2LJZls7G+Tr1RF24NSfJOciMRzZ/Wts65c2c5cuSIiF8FnUVeFCzJ8vISa2vrRKMREskkU5OT4iQ6kUgyMjJCLBb1J/K1WVhYRNe7qKqK4zjCbdXrmVx33csExPIm/CVQFZV0Ji1cQwHcCBxOpmkSi8WYmJgglUphGMYOGBC4x4Jy583NTWq1Gh//+Cf4m7/5G7+s0yUajfGe97yb973vfaITyZucV8J1XRFvW1pa4uDBg+zfv59yuew5knom5UqZtbU1tra2qFarrK2tAV457tTULoaHhzhx4gSO45DNZpmcnOTw4cOk02lxJfP06dM8+uijwPakp3Q6LTp/+svmG40GqqqKjq3+dbYsiwsXLlCtVtm3bx+yLIsJd0FZafCmVtM0NjY2KJfLjI2Nif6vYDpbt9vFcRxxJThw2gyUBsjmsiwvL+M4DgMDA6KDynFcTLPHxsaGiGu2222azRaJRBxVVZmYmKBer4tS1sAppKoqW1tbXLp0iVKpRDyeQNc955d3346ArOvrGxiGTjQaJZvNMTY2Sjab9Z1S23G+AFJVKhVc1+Xw4cNUq1Uee+xxNjbW/Xjeht9Rhh9lzYr9EovF2bVriocffphWq83+/ftYW/NihYcOHeLgwUP+8qyRzWa5//77xfCBQJ47QqJQyJNIeHFB27Ex/YmQtXqN5aUl1tbWxElqu90W+y041l3XRdM0f2pgVJTbelMfYxSKBSYnJkkkkt5sPteb0CdJEqlkEkCU0na6XSzLIpPJkM/lABgYKHH99dfviIkE+mmBU41Gg9HRUer1+vd8zQ71s6ngb/oLpXQ6zS//8i8zODjIqVOn+Lu/+7uw/PwF0Gtf+1rK5TKPPPLIc/YYsixz++2386IXvYjV1VX+9E//9LtOxJuamuJd73oXsViMO++8k4cffviHftxEIsHx48efdceTpmn86q/+Kuvr63z2s5/9sXbxha/roUKFChXqp1E/EjD1w/4BdxyHixcvYts2U1NTRGMxopEIhmHw0EMPixHTsiyjqAqa6rmk4vG4AFLJZNKPn8l+51GdhYV5qtUqy8vLwjkT9E/t3r2bAwcPIiExPz/HzMwMDz74INdffz2rq2vU63U/ihRlZuYSIyMjJJNJarUasVgc13UYHBykXC6TSCQYGhxCi2he1Mlx6XTaNJtNarWa37Mj0Ww2qVQqyLLM7t27xQn36OioB6b8yXMrKyvkcjmq1apwkfTHn2RZ5oorrmB8fJx2u+05hFxw/X4rSZIoFAokk143UKVS4WMf+xh/f9ddKLLMdde9nPe+9z3EYjF/YpxELpdjbm6O2dlZotEopVJJTJnb3NwUE/Bc1yWdSjMw6DmLTpw4wezsLIlEgt27d1MoFHj44YeFY2z37t1cccUVfhG9VyRfr9eZmZlB0zQBES+ffBicnAQRuAC6BcDAMi0s28LsmbTabRzH3gElvQicF500TZN6vY7rukSjUSqVCoZhkEwmxX3KsoymeZBS13X27NkjYpWqqlIoFAVYlGWJaDRKLpej1zOJx2PUanWq1QqpVIparYZt22SzWRRFYXh4mLW1deG+C+KF8XicRx99VEzz8brOeoBLIpEglUoxMDDIwECJbldnaWmRer0ujv1SqcTw8LCIFXpgyutdW15eJpPJMDY+Tr1W5/Of/xyzs3Nomirgjtfd1WXPnt1MTk1RKpZIpVLMz89x6dIlP1KYZ2VlmUQiwaFDhxgeHsaybBKJOCsrK1y6NIM3gW9bwTS9o0eP0mq1vB6rXo9mo8nyyjKVcplWq8XG+gbzC/NPm44ZHMPBsaAoiiirLxQKlEolATA1TWNyahfDw8MiDmqZFooio6gq9VqdTrcjOugGBwaIxeKAy4EDV/CiF73o+0Kon2RIFYKpUN9LLzSYChUq1A+v8HU9VKhQoUL9NOqHjvL9r15Ncl2XxcVFut0uV1xxBZlMBkmS6Ha7nDhxgpWVVXHS7XVJaUSjMZJJ78Q9lUoRjyfQNBXXxXNHbWywsuK5W+r1higSD8YP67o3CaxWrfLQQw8xMzODYfQYHh7mF2+5Bddx6Ha7rKysMjc3i+PYNJstCoUou3btwnEcSqUSTz11hqWlRW644QbGxsd48sknGR4eZnR0FMcpiN6bAEwF3Ue6bjA3N8fx48fZ2toSHUOvfe1rmZmZYX19XYCes2fP0ml3tifm+R8PPfQQc3NzXHXVVd5I53ZHgJ2gtypw99i2za/+6q8yMjKK4zi87GXXei6SToeBgQEGBwdRVZXx8XEymQzLy8s0Gg0kSWJxcZFCocDY2BjHjh1DlmRv+/UMTpw4wcLCAslkkr1795JMJnnooYdQFIVcLs+ePbvZt28f0agXx8ukM6KTJABihmEIp5IXjUO4f/pHWVuWJfZb4PDyomJxsrkcjmOjKIo37c7oifJ08CJ9iUSCSqVCu90mmUySyWRErDMajYpoYCwWJxKJsLi4KMruTdOLnXlQycI0HQEIvZM6iWw2QywW9SN9PeLxGLbt9JWmXz4G3XO4HT58mIcfflhM6TP9Ivput+t3c+UAyGQy7N+/3wON5cqOSOr4+DjJZEpsp6D4PJ3O0DMM4vEYb3zjGymXK8zNz/HVu79Ks9nwY26eQ27//itot1sYhs7c3DyO65LP51lZWcGybL/8Xmdrq4zjOKRSE8zOzmKave0per47Sdd1du3aBUj0eiam2aHRaLC+vsbm5ibnz5/n7NmzVCoVAR2DfRVMsAy624IYpuM49Ho96vU6a2trjI6OMjw8DMCF8+ewTJOpqSnvtUKRMQyDdrUqAKYiKyQTSSLRKC4usiT5ceHt17DvBqC+37+HChUqVKhQoUKFChUqVKj/dT0vU/n6FZzslctlZmZmOHToELGYV5Tc6XR48MEHWV1d87ukgoLzKPG4F93zJt6l/Oicgus6WJbNy
soyGxsbTE5O0mw2/W4cb8LY9PS0KBy/dOmSKMeMRqMMDg7S6XSQvZIe0uk0Bw9mOHDgCq655hqeeuoppqenWVpawjAMVlZWSCaT7Nu3T0yAm56eptvpkk6lyWSz7Nu3T4wYbjQaNBoNksmk+Pr++7+N49i0Wi2mpqY4deoU2WxWOBwSiQTdbpd6oy5O0C1/ilmv12N2dpbFxUWOHDnCgQMH6PV66LouANXm5iaFQoGBgQEcx+GXfuldrK+vs7i4SLFYxLZtZmZmeOqpp0S/UyqVIp/PMzY2xtDQEPv378eyLGq1GnNzc5S3yiyvLLOwsCDA0L59+9A0jUceeUT0Q+3du1cUqSqKTKFQQDd0AcyCTqQgymgYBqoqC0Bkmiadjgfkgt+PRqOimD3owvJcNgbVapVSqSTigEGZvdcz5Ip1C6BPNBoln8/TaDSwbVtE+lRVIRZLkcvl6HQ82Be412KxGJlMhpWVFSKRiCjd39zcJBqN4bqOfxvH7zizdjjC+hWMvQ6KWk+cOMHp06eZnJwUvVWdToeZmRlMs8fU1BTZbJZsNiueM/9/9t47TJL7Pu/8VFXnHCfHnY3YXQALgEgkQDCKlEiRlkRTMi1LOp9t3VHncA+t52Q/d49l38nHe0xLpzMp35mSqQeyLFkixShmUgSRiLw5zs7s5Jme6dxd3RXvj1/Vb3oWiwVAgYIo1vs8g53QobqqutG/T7/v+7VtMRXv6tWr5PMFstmM7BkTj7NLp9MlFouSTmdIpzOMjo5gWxZf/vJX6PV6HmxRaDQaDA0NsbCwgG1bZLNZul2dXq8vXVZ+P9ns7CyXLl2S47sBCfeSXoQu6znjms2GLLI/ffo0J0+e9GKErgS2PlD0j73YfpN+v7fHOeX/rdvtsrCwQLvd5uDBg2haiOXlJfE8Hh4mEo7QNwwANE3FtFx5riqKAq5LNBYj50X6rn9NCgBVoEAvlt/x1ul0ABHDu1mhs/9adLMPr/zIPIhBH4b3vA302knTNEqlkoT/3W6XRqPxOm9VoECBAgUKFCjQjfWqwdT345S6/jq9Xo8XXnhBji6ORqNywsn6+jqKqhAOhQj5BefxuOeSSpNMiu4dAMcRU+xWVla5cuUy2VyO5eVlut2unNSWzWa5/fYTrK+vsbS0JN0rMzMz1Ot1r+tGTCgLhbQ9k8VGRkYYGRnhwIED/P7v/z5ra2uMjIwyOjrK9PQ0uq5z6tQparWagBj1Giiiw+b48eMsLy+jqirpdJpqVUyBi0QiVKtVLxooolibm5vS3VUsFtna3CKRSJDJZGTvlK7rsgDcNEx0Xeexxx7j7Nmz3HXXXYyNjaHruiwA397eJp/PUyqVqFQqlEoler0e586dkx1QIyMjzM7OEo1GZa9Sq9Xi1KlTNOoNWu3dSYT9vsH2dkXGyQ4dPIRl25w6dYpoNEqhUGD//v1MTEwQjUZxXZdisYhhGKyvr3tOHtFlZBhi2mGxWJQQzjAMut0uju0QT8TllDu/fN5/7P40vlgsJuNiyWRSgs1OpyMnFSqKgoJwWA0PD9Nut2m1RLfF8PAwti0mG2qaADCidFuVReGrq2tUqzuMjo4SjYoOsVgsRrPZlBFLv5PK33+AhF2D8ru6/Gij60KhUOCOO+5kfn6earVKsViUpef+hMBer8fBgwcZHR2V+9afiqioKrVanXw+z8jIMJFIBNOySGcyRKMRbx+oXi9WhHvvvQ9Q+OpXv0o0FuUNb3gDsViMVrtFt9sRcDGX49q1JVRVkdOWej2dTCaD67osLi5iWZaMxQ4PD5PJZOj3+ywvr1CrVQGF7co2Z8+e5bHHH2Nra0t2R/mwEXYdcoPdYf4i2LZteR1fPqza2dlhfn6eubk5VDXM8vKSF+9Niw4vLYTpmMSiMeJxcQ6JyCvkcjnpoPRvc/B16mbwKShKD/SjqJGREd73vvfxO7/zO2iaxq/8yq/wsY99TMadLcuS02QBfuzHfoy1tTVOnToFCHjt/3/E10//9E8zOTlJrVbjC1/4goztB3rtVCwW+Rf/4l9w9913s7S0xNe+9jUefvhhDMOQQ2J8h7HvegXk66P/c6BAgQIFChQo0F+FXhWYerVQ6kaXdxyH06dPEwqFSKVSpNNpTNPkueeeZ21tHVXVpCMlFouSSCRJp3eje2LB78qYz/r6OpcvXyaVSmJbFpbX4QNi6l4+n6fb7VIsFhkeHpajg69dW6Lf73nxvzX+9E//hDe96U2Mj4/L/iFFUWg2m1y+fJkTJ07wEz/xEyQSCdF55TlJKpUKR44ckSDs0qVLtFttcvncLhxRFNLpNEtLS0xMTFAqlVhdXZUuLMMwSKfTNBoNKpUKsViMofwQ1WqVZrPJ1OQU7Y6IwnU6HXZ6O7JIe2Njgy9+4YuMjY9x1113USwWvVL3DemqKhaLVCoVRkdHGR4eJhqNyrLuarXK1atXWVxc5OTJkxw/fpx8vkAsGqXT6chC642NDRKJOPFYnMNHDtNud5ifv0I8nqBUKsrJOAJy2QwPDdHr9bh69SqRSFQCJV3XJYCIRCJkszlaLTGKuVAoyEJqXdfp9XqEQ2Ei0Yjs4vKBgO+oSiQS1Gt1FFX8XtM0MpmMmL7nuaciigBG6XSaXE6n2Wx6XU8JotGoAGKOA1goSshzcYWka8+HO+VyWd6vvxDznTwgytz9wnwfTPnHv9vtStAWDodJp1P0ej2Ghsr87b/9t3EchxdeOInrOoyPj8uuLEVROHXqNFtbFaampkgmkxw8eJBr15ZYXV2lVquh611arSZDQ8NYpkXc69zSNI1+36DT7aCpYqLliRMnpFMtlUrhuA5ra2sYXjSwWq1Jx1fUA6eWN9nw6tUFSqUy2WyWZDJJPp+Xrof19XW2tsQI7kajwTe+8Q1OnjzpFe8jj004FCYUDkno5Reb+/vLcURc0l8U+dOTfPlwant7m2QyycTEBJZlsbq6ytzcHLgQCoewHZtkIoGiqiiqgouLAoyMDMvbGHRl+efVK3VHBS6qQD8qUlVVOiIBUqkUQ0ND/PIv/zLVapV4PM7v/M7vUK1W0TSNt7zlLSiKwsc//nHe9KY3MTY2hm3b/H//3/8nnVa5XE66kdfW1l6vh/Y3WltbW/yzf/bP+O3f/m1+7/d+j06nw7/4F/+C3/iN3+Bf/+t/zcMPP8y//Jf/kq2tLQqFAr/+679OqVTiF3/xF1EUhd/8zd8UE5EDBQoUKFCgQIH+CvQDi/LdCEq5rsvKygrLy8vcfffdMtZ0+vQZ4S7SVCLhCNFohFgsTjKZ8KJ7SekuElBAlCw3m02uXLlCPB6TU+ZcF9rtDq4rOmzS6TR33nkXzWaDixcvMjIywvj4OAsLi9TrNRzHodfr8b3vPcXp06e5/fbbefDBBxkdHcW2bR577DGGh4clGLEsi2azhePYpFIp6XSybZt4PM74+DiqqmIYBq1WC8dxxCLfc4XNz89z+PBhZmZmWF1dpd/vE4vF6Pf72LZNpVLBcRyvPyhJr9ejVC6TyWRkabo/Stl3EjmOI+HS9PQ0J06cIJPJcO3aNb7y
la8Qj8c5fvy4nPi2ubnJyZMn5XX9Hqfh4WFyuRz5fI5isciRW47Q7/d55JFHSKWSxONxjh49KmKBKyskk0nK5TL79++XXV69Xo9wOMzG5gZXrlxhfX2dffv2MTMzQyqVYnl5mV6vxx133EEsFsO2xX70J+0B3vS1qPw014/k+RHNQdeNHyXzJ/GJ6YcGvV4fMYkwSiwWR1EEqPTjgI1Gg36/RyQSIZMRHVji/hXCYRGf9IvOt7Y2UVWV4eFhz0HUk58yW5YoNPdLtnu9ntyvojtLQLiZmRksy0JVVTkmW8QUY5RKJQzD4K677uTy5StUq1Xy+TyGYbC9ve3tV51qdYe5uf0Ui0X2758jm82wsLBIrVYV0c9GQ0Y1BWhTiETCqKrC9vaOF4NreaWpGVzXYWtzG9d1KRVLKECz2ZAgLhqJ4LouUxMTrK+v02639kxQ7HS7aJpGu91he3ubVqvNlSuX+fKXv8zy8rKEP4MTEMXxiEnXWywWk3/3IZ7fK9VsNtnxCtOvn9jlOA4bGxvkcjlyuRztVotarUY8ngDXJZVMoaqKmOjpvfZEvHL/G8UrA0AVKNCNNfh66z9XfLD827/927ztbW/jgQce4HOf+xy2bfPcc88xPz8PQLlc5t/9u3/Hj//4j/Pggw/ypS99CYBHH32U4eFh3vve91IsFnnkkUdet8f3N1mO40in6sLCAtPT07zpTW8inU5Tr9cpFAp8+MMf5p3vfCcf+MAH2L9/P5cvXyabzfJTP/VT/Nt/+29f74cQKFCgQIECBfoR0Q8ETL2Us6rZbPL0009z6NAhFEWhVCpxZX6ehYUF2SW0d+qeiO6JXiEBFjqdLo1GnUajwfr6Oo1Gg1KpjG2bA7GzPuGwH/cTb6rHxsYYHh5mbX2dixcuMD09xfT0NCdPvsDi4iKuK4rP5+fn2djYoFQqkclkvCL1BuVymfn5eS5cuICu66TTaQ4dOoRtOzSaTfbPzRGNRuUiWtd1ut3unilwfkfWhQsXKJfLDA8PU6/XJbyZnp7hueeeZWhoSMKOaDTK0tI1bxpbWU6by2QyonfKNOW0N8uyuHjxIleuXGFiYoKxsTFc10Xv6ly7do3R0VEsyyKTyVAoFFhaWqJYLMrtSqVS0rFkWxYrKyucPHkSVVXJZDIcOXKEq1evsrOzQyadZmRkhH379pHNZonH4/T7fXZ2djAMg1qtxvz8PLFYjKtXrwIiEjI3N0er1ZLT8p5//nlmZmZk54jflSWKx9syzuU7rPy+IN+1JCOA3htw4cKJ7plY5ziivFxRVFzXkc67RqOBrvcAyGSydDpt73J4ENKWTqlGo45pmkxOTsh+LDEJz5bwxYeLgBcNtCVsATz4ZrO9vS1BTTgcwrIEaLUsi+npKRYXBWgsl8uUSiU6na43vc6m2z3F5OQkU1NTlIeGiMfjXL16lfX1dQzDpNvVuXDhIjMz02QyWVRVkUB0eXmZkRHhmLNtm2q1QWWrQjKVpFQuceXyFTqdDp1Ol4MHDwCKdJ9dvnIFo29Qr9UJRyKkUkmi0SiVSoVKpcL6+jpnzpzh61//Oq1Wa8/EQ7+EPplMkkwmZaTRh4TXx+n8hVQmk6FYLLK+vs7m5uaeuBBAv9+nUqmIKGdUpVKpMDY+TpTowD5XvcW0Qz6XIx6Pv+Tr02sBqF7JZQMF+mFSvV6nWCwyOTlJoVCQED+fz5PL5SiVRJejL9u2yeVybGxskMlkyOVyFAoFFhYW5GXC4TAvvPCCjIEH+sHLMAy+/OUv86/+1b/iox/9qHwvMDs7y+zsLLVajWq1imEYPPfcc1y+fPn13uRAgQIFChQo0I+QXjMw9XIxP9M0ef755wmFQpTKZVzHYWenyuXLl1EUiMcTXnRPTN1LplLE43E0zzFj2za63mNh4SqdToeVlVUsy/QWssJNEYvFcRzbi9Ahe4N8J044HGZmeprxsTGWl1e4cOECExNikT86OsrOzg75fAFFUTh37jyhkMYb3vAG1tfXKRQKXLp4SfQJeQtky7LI5bKyY2Nzc1NGqEDAjVqtTiQSRVH6stjcdyglEsIRFY1Geec734llWbTbbdLptCw998uw6/W6fNPY7XZJp9NEIlF2drbllDMfzpimyfz8PFevXqVUKnHixAmKxaIHDFTy+Ti33HILBw4ckD1NvV6PbrdLOBxGVVUWFxdZWFggFAqRz+eZmZnh7Nmz9Ho9crkcY2NjzMzMkk6n5GJ/dXVVusQURWF6ehrbttnY2GBpaQnTNBkdHWV5eVm6pU6cOCF6hlDkPvOBlO+s8YGE727zp9KJy4eJxWJeobaKH/P042DCPeSiaaoXGxOuN39B1Gw2JUAUYKwvJ875Lp5QKCRh4cbGBqFQiPPnz3PrrbfJnjN/qpzooRJuL38/DEbH/PvY3t4mFNIYGRnxHE01Eok4xVIJwzCJRCI89dRTTE5OMj4+Trvdptvt4LouCwuLtNptZmdmSSQSHDhwgHg8zsbGBq1WC9sWxfoTE+NooRAKCgcPHmBiYpxWS8A3/5xKJOJMT02zsrKMYRgkEknyhQKhcJhQSGNiYoIXXniBrtfdVa/XCIXCHDx4QALIRqPB448/ziOPPCLjkz54i8fipDNpstmsgI+pNOFIWEJF0zRxbAd7AOL5ET8/5jc1NYWqqqyvr7+oEL1er9PviQmLut7FMsW+QxGdUlpIA883NTo6IqGmf1/XQ6RBIPVq+qde6rUwgFSBftjV7XZ5+OGH+emf/mn6/T6f+tSnPKdkmw984AM0Gg2++93vyss/+eST/PRP/zRnz57lG9/4Bn/37/5drl27xpNPPikv47ouf/fv/l12dnb4kz/5k9fjYf3I6JFHHmF7extVVbl8+TL9fp9HH32UWCxGrVbjXe96F4Zh8F//638lkUjwC7/wC+zfv5/HHnvs9d70QIECBQoUKNCPkP5KpvL5pcnXrl3jDW94g1jAKwqXLl3CsW0BoWJxEskEKc9VEY360T17IHJmymgPCBdNKBSiVCqRTCYJh8O02210XZQ1t1otzwHSJpVKyjhCJBJh//45pqYmeeaZZ3nqqe9x6NAhms0m6+vrnDhxgltuOYKmabRaLba2tsSCW1WYmppidXWVSCRCp9ORjh7LEot9EUsK4Ti2B2lsNE3EzPwycL+fyjQNCoUChmFw/vx5EomEjHBVKhVZqt3v91lfX2enWsX2psxNT0+TTCYYGjrCuXPnJJwanIbkQ6HnnntOlkoPDQ2Rzd5GIpGg2WyysLBAoVAglUpJ18/y8gobG+uoqsr4+DipVIoXXngBTdMoFApMTU0xMTEhnS/hcJhqtSohjmWJjqJcLken06HbFY6fSqUii8iHh4cBZGxN13Wef/55otGo7OzyI12NRgPbtqUDJxqNkk6n5fEcBFEiWuegaSHC4ZB0Tvn7RUAuBVVVCIXDFAoFotEYrVYT07S8YvLdLixflmURDgsItrGxQTqdlqDIsiwJ5/xoIQgYqygKrVab5eUlDh48SDotII1lmdTrdZrNJqVSCUWBSqXiuQMdkskko6NjRGM
xLl26zOTkBIVCwZts51CrVmm3BHwql8uMT0yQyWTZ2tpka2sLyzLRdQEwi8Ui4XBIAj7Lsr3zxWVqaoper0etVmNnZ4disUgmnUZVFObm5lhcXJSwyzRNTNNiYmKCVCpFrSZg6Wc/+1meffZZCXz8yKno9MrJf/0YY7vWlqXx15+zgJzG6e9vTdMYHR2l1+tRrVbl5fzpi+1Om1TaO3+NPiigoKCoYltc1yWdTMkplYMDDgZv60ZF6/73/uvY4M+v9LXv1V4nUKC/bjpz5gxnzpyRP+fzeb72ta/xta997UWXXV5e5rd+67cAWFtbuyHgePrpp3n66ad/YNsbaFc++Eun07znPe/h//q//i/pWP785z/PJz/5Sfn/hmazyUc/+tHXc3MDBQoUKFCgQD+i+kuBqVdaht5qtbhw4YKM5jiOy9LSNQzDIJVOk0wkvKhPikQiTjgcBsC2rYG+HtGZ43cROY6LZYk3U/F4nHg8Tq/X48yZs5TLJebm5tjc3GJp6RrXrl3DsmwPQkT2LEhLpSK33HIL4+PjxONx7r77bllk3ul0ePbZZzFNkwsXLjA6OgooXL16VZaHp9Np7rvvPprNJteuLdHt7oKYmZkZSqUSpmkSj8eJRqMStiSTSTY2NnG98ulsNisjdq1WS0a5fAdKKBSi5XXudLs6Ozs7HDlyRPZM5XI5stms1wXUlscmGo0CsLq6KruoFhYWOHr0KGNjYxw5coTt7W22trbkxLOtrS0URWX//gM0mw2Wl5dJJBIUCgVmZmZkybm/3yuVCrlcTkyKUxRcVxTk+i6ZRCJBtVqVt+26Yly470La2dkB4OTJk6ysrHLo0Dne856fkF1diUSSSCQsO7IGQZQPfzQtRCQSFlE/VQUvDub3Tom4I2iagFuhUAjbFpAin8950/YaGEZfdkv5+9GPlpmmKacA7uzsSOeV32XW7XbllLnd/i+XaDTC5OQUhmGwsrJCKpVG0zRSqRSdTodYLCZjlo7jks9H6PV63H7idhRgu7DDxQsXKBQKjI+P02q1vDJ1h6tXF9B1nZGRERKJBKOjo9Jd1O/36Xa7tFotxsbGZal5u93CNE0KhTzJZJKLF58TvTGRMImkKPcfHR1F13V2dqp7+mXm5vYxPDxMt9tle7vCH//xH3P27Nk9cctkMinPx5wXn+v1enQ6nT1xR98d5feD+X/zp/XtHn+xTeVyWbrpfNDjOA6dTkd2uXW8Y6aqoqdM9eKbo6OjaJomb38wXvn9uKcG//5KdKNeq0CBflhVq9VuCKUC/fVVq9Xi3/ybfyN/bjab/L//7//7Om5RoECBAgUKFCjQrr5vMPVKoZRt2ywuLlKr1bjnnnsJhyOsrq7Q1XWSyRTpdErCi3g8LuN3g8XefvFxKBxi//4DnDp1EtM0CYU0OfEM4Jvf/Cbz8/MMDw+jKCoHDuwnFotx/vw5rl27xvDwMKOjo6TTaTnhbXt7m3379nm3F5bF1LreQ9e7JBJJbNuiWq3SaDRoNpusrKzI7W00mxQKBc6dO8fVq/OyGyoUCtHpdBke3l0MNxoNLMvCNC00TSWfz7G0tMT29ja9nug60nVdxJP6fXRdJ5fLEQqFyeXyzM7O0u12Ren7/DztdpuZmRmy2SxjY2OyWHpnp0qz2aDVamH0DdbX16UzxTAMlpeXWV5eIR6PsX//fiYmJtB1ndXVVTqdDoVCgUKhwOLiAo7jkM1mKZfLzM7OUigUCIfD0i1Vr9dlP5gfI3Qcm3A4TDKZlMcymRSOte3tbc8JZmIYJrlcVjqWPvCBD3D23DkuXbzIF77wBd75zneSy+W8QnEHw+hIp004LKJeiWQS1Vvk27aAR4bRl71bfixM9EqpErKI/SFAFUAqJeBXs9mUxyKTyXgxPwPTNKRjKxaLcejQYZLJhOzE8qN7/vmr6zqGYXi9Vbb3fBHTHLvdDgsLi4RCAnIJ4CUioM1WC9MwvNsV8dPxsXGKhSJnz55hfv4qExMTRL2piZGI6Hnq9XrE43EJdBRFkUXsoVCYTqfN0FCZdrtDqyVKzGdn9/HEE49jevG32dlZcdwSCXK5HBcuXCASCWOawuU3PT3N0NAwvZ7O5uYmf/iHf8iFCxcAZHQvlUpRKBTI5/Nks1mvy6oqS+t915r/nLj3vvtIJVOYlkmr2WJjc4Nri4v0ej0PHtqyiD4cDpPNZtnZ2dkzVc+/bUDCrVg0SkgL4bqOdw5n5GV8iAa8yDnl/+7l3FODr4HfD2QKQFWgQIECBQoUKFCgQIECCX1fYOqVQim/A2ZjY4N8Pk+pVKRardLpdIjHYl5pdoJEQriJBp0m10Mp7wbZ3NxAURQZJTMM4Wz6zne+w9LSMkNDZTKZLJXKFuFwiImJCW6//QTnzp3l8uXLXLp0ieHhYS/eFObq1atsbW2hqiqlUglVVcjn86TTaW8CX5NqtUq5XBZgpVKRTibDMOR0NhFfilGv1yiVStTrdVqtFrFYlHq9LqJ5to1j2+TzeSYnp8hmM0SjUebm9mMYBuFwmJ2dHVKpFAALCwuUy2XpUMrlcpw4cYLLly+zubkp3TyZTBbLsun3xSjuTCZNqVSk2WxSqWzTbrcxjL50lQCy32dtbY2LFy+Sz+elK6rVarG4uEg8HieXyzM6Osr09BSZTAZN0+Q0NdM05b7xHUyGYch4mwA2YRyng6qqcvre9va2PMaG0ZdxwF6vx63HjzM6MsLG5ib9ft/rjlLo6T1inhOnbxi4vnPKOw679xeSpdqaFkJVFQlDBBCzAVfGxUKhMCAcUdFolHw+7/U5dTFNk3Q6jaZ16XQcOSlQURQvcrcLJnanQu6NpIlj7qAooOs92a3UbDYwTZNwOEK9XqdWq1EuD3luMOGcSqWSRKJRQppGLBbljW98I2fPnuXcubMcOHCAYrFEu92SIG1lZdWLje7uV4Br164RCmnMzs6ysrIGKBw7doxnn32WTqdDKBQikUgQ9TqdZmdnuXz5iuwbC4fDjI+Pk88XcBybtbW1PVDKj4kmk0lKpZKMhna73T0xTB9M9Xo9TNMUXXKJJKGQJiZyFsPkcjkc22Fh4aqMvfpwU9M0Ca8Hi9D9cw6U3e6xcBgXcR5GvfJ1HxAOHiN/u3xANQitBo/tS7mnXup3r1avxW0EChQo0KCSySSAHCYByG7AZDJJNpv1ot+WvM6EN4XVf82NxWJ0Op2X/B6Q3ZmGYQDISbfXT1O9mcJetN4fOOPf1stJ0zSKxSKu69JsNun3+y97Hf//Wf72p9NpWq3WK97WQIECBQoUKNBrr1cFpq5f1N3sciDeDK2srBCJRGQESddF3ClfKJDwRsb7i3wfHDjO3vgeuPLvz7/wAsNDQ0xOTmJZFq1Wi4WFBa5du8bs7Cz1ep3t7Qr9fl+WVw8PD3PrrbeRzWaZnxcTzHZ2dgiFRAnz0NAQpmlSqVSwbZutrS0vqrRNNBpDVRU5PW9zc5NUKkUsFmNqakrEhVTVc4xESKXTGIZBPBanPDRENpuVrqhSqUQ4FsN1XSqVLRQFWfC9srLC7Ows09PTpFIpWq
02i4uLbG5toes6MzMzaJpGOBzh8OHD5HI5ms2WhGm63iWXyxGJhKnXG4yMjnL06FFOnjyF4zi0Wk02NzfRdV2+CRXunS6hUEg6VBqNBpoWIpfLk06nGB+fYHx8jEQiIWEOQK/XY2trCxBTm5JJUeTuQ4Ld8nFLTtJzXXBdyOYsGt7+tCyLfKFAJByRcGp0dIzp6Wm++93v8uSTT3L33Xdz4MABue1dr7dKUVTC4RCRSFR2ZPkF6D6cGXRO+W4n31HkR/Rs29kT78pkMkQiEQmnUqmU/Nmf8mjbwnEl9qUiYYsfCbNt4RpzHQfXdVAUlXK5RKVSodvtelP9BEjZ2akyNDRMp9MmEokSiUZ48skn6HS73HbrbezbN4vjOLIXLJVKcfr0aWZmZpid3Ue32/HeYLukUinPPSW6mZ599llKpRK9Xo/NzU0ajTrHjh1na2uLjY1NVFW4thKJJIoC+/btY2NjwyuLD3sl9HESCTF1cXV1lT/+4z/m4sWLAF7Ze4x0Kk15qCyje/V6nXarjRYS5eWdTkc+D/w+uGg06g0OsDxg1efq1XmazaYoMPfOM//1xB8sEI1GpUvSf50Rl3FF4bmqgQuO69Bptzl0xwlAHCu//2vw9cx3mfmAyv/X1/WOqR8knBpUAKoCBQr0/Sqfz/ORj3yEs2fPcurUKR588EHy+TyRSIT/9J/+E//oH/0j6vU6tm3z8Y9/XP7/6+d//uf5+Mc/TqfTIZfL8Q//4T/kox/9KHfccQc/9VM/xb/8l/+Su+++m3K5zOc//3kAPvCBD1Aul/n3//7fMzY2xsc+9jH+h//hf6BerxMOhz0ntSM/VPA/cBD/LxUfMhw5coR/+A//IZcvX6ZYLPJbv/Vb8vqDHwL6/z/3rzc9Pc1HPvIRLly4wNDQEL/zO7/D6uoqsVhMTmr23wv67wdGRkZ4//vfzyc+8QlCoRAf/vCH+ff//t/L13Efiu3+P8q+8U4OFChQoECBAr1mes3LzwcXV/7ErrHRMeKJhNf141AqlUkmE3LqluO42LYly5DFon/wkzZXfhq2vLRM2CvYdhwHwzRlhK1SqcgpcoqisrCwIHuCSqUy+/bNkc8XuHjxggeuQjSbDVzXoVwuk0qleO7559lYXxcRNU0jpGnSVSIgknBemJZFNpslFAp5UEMVkSxVJRKNcvcb7mZqSsCzSqXixa4ijIyMoOu6nCQXjUYxDIN0Oo3rutLZoaoK+/fvZ319g52dHUzTZHZ21os3iTdWx44dk84yP47lA4qr8/NMjI9z1113cu7cOcrlEul0mmazKSN1uq6Lk8Bb8Luuy8TEpOfkitHv9yiXSxJc+eDQtnf7gEKhEIVCAdd15e8Gy8b9N3qJRJJWq0mtJjqLcrk83W5HFLrbNsViEUWJk8mk6RsGtm1RKBRZW1vjmWeeQdM09u/fT6fTIZVK74mt+dBycFqfX37uO2z8c9PfV/4bVB8q+m9cB3uIwuGwhFP+udpsNuj1LNndZXrn36BrR1U1eVx2oYroter3DXRd3/NG13VhY2OdqakpWq0Wqqry4IMPEo1G+fa3v83q6goTExPcd999HDp0mFQqxdWr85w9e45Op8P+/fuJx+PexD0bRVWp1+r0+z3GxydoNBu4rnBUiS61GI8+esZ7jCGSySSapjIyMopt23tigT5U9DuivvCFL3D+/Hl5XKPRKJlMhvJQmUKhQCQSoVqtous68XiCw0cOk8tm+eY3v8l2pYLuTXUMh8OkUiksy0RVNUzT4KmnvselS5coFoskEgnRexUOyzJ9HxiJCXz6njgfII8lCriug2kajIyIov3rFxY3AuzXR/oGdb1j6gcFp15qGwNIFShQoFejWq3Gpz/9aY4dOybL63/pl36J8+fP8573vIdPf/rTnDx5kn/yT/4JBw8e5OzZswCyM/FDH/oQn/70p9E0MT327rvvJp1OUyqVuO+++/jTP/1TeV/hcJjp6Wnuv/9+3vSmN9Fut1FVlbe97W3cfvvt9Ho9fv/3f5+f+7mf48///M95xzvewVe/+lXuv/9+Wc4eDof53ve+x8MPP8wv/uIvcujQISYnJ5mdnWV1dZWFhQVUVeWpp57il37pl/jkJz8pP2w4c+YMn/jEJ3jve9/Lbbfdxp133sn+/fvp9/s8/PDD/ON//I8xTZNPfvKT7OzsoKqqdBQrikIymaRQKPDf/Xf/He12m//6X/8rt99+O7feeiu2bfPJT37SG7oTKFCgQIECBfpB6TUDU9cv9Hy3VL/fp9PtkMlm5HS8bDbjFZyLqXs3ckpd71AA2NzcotFosLW1hWmaJBJJHNvGsgRI0HVdfrrmuzLq9Qbr6+v0en2i0QimaaGqKr1eH8NoYVmmjIGl0xlsS0AngKgXCUun0zKS5LoOqqYRjUTkxDDxCaBKt9vFMAxipsnS0hJjY6OYpsnIyIjsWPILr8UUN+j3hctoamqKSmWbpaUl8vk8IyMj8pO7SsVmqyJcYPv3i94s13W9KKPhlY4L2LS8vCyn4X3jG9/koYfezOHDh7ly5YosnxeRqAS2bbG2to7j2MTjCfbvnyOdzmBZFrlcjkazwebmFtFoBHEI3D1F0eVymXQ6TTwWp9MVlni/zFrv9ej1erKw3DT7qKpKsViUn5gmEnFarRbtdodwOIyiqBhGlKzX7TQ3t4+ZmWmWl5dZWlrCdV2OHDlCs9mUcMyPz/lT3HZBlILrigiYmNZnSRDlQ0YfRvnnqw8vfNjof+m67jm0FA/YNaWbz79vwzC98u8wmqbS7zvi3O906ff7jI+PebBHQfUiaf5tuq7D9vY29Xqdqakp0uk0KyuruK7Lvn375PYuLCx4zwl44xvfyL59+zh58iRnz55lcnKSyckJCR13drYpFAreG+48sViMaDTC3NwcjzzyXVkQ7rvDxIKjyMWLF2V/VyQini/druj2+uIXv8hzzz23BxAlk0nK5TLZbJZwOEytVqPb7ZLNZrn11ttIJBOyYyociWCYpnStZTJZQPRCPfHE41y6dIl+vy8BkwCaCVlu7kNIP045CKT8n8X3wjtVLBY5cOCABJevRDeDU4OvRzfrnfpBQKRgul+gQIFerQajdMPDw0xPT/MHf/AHvOUtb6FSqeC6LltbW2SzWXm5WCzGP//n/5zPfOYzbG1t8cQTT/DWt76VVCrFF7/4RR566CESiQSrq6vyOq7r8od/+If8vb/39/jGN74hX787nQ7Xrl3jrW99K4lEgpMnT/LRj36Uf/2v/zVra2vSceXroYceolgskkql+MIXvkA6nWZxcZGHHnqIb3/72/ziL/4ivV6PVqu15zX9nnvuIRwOk8/n+S//5b/wz/7ZP+Ozn/0sDzzwAAcPHmT//v382q/9mhy0cn3PICA/LHnmmWcwTZP3ve99fPGLX+SOO+7gtttuu+F0yUCBAgUKFCjQa6e/NJh6qWhfu92mUtkmHo8zOTnpuVVcisUSoZBwsPhF4GL6niOLih1HxHJQFBQPiAAyitZstnBdl2QyQSwWI5VKkkoJeCQW06Y3jaxNIiFiR
b7rqFAoUCqVSKXSrK2t0m5bsqw6nRKl6LZlEYlESKVSHmyIemXeKcbHx0gmk3Ixr+s6vV4PRRGW9FarRVfX6Z09Q7vdYmRkhJ2dHSKRCJZl0W63vbgZ9Ho6pmmSTCYJhUJMTU0Sj8XJ5rLSUTU+Po6iqGhaiEpli/Pnz3P48GFisZi3WNdkL5VtWwwPD6PrOv1+n3a7zZe//BVuvfU4oBCPx0ml0t5lbXRdp1Qqce3aEvv2zUpYEg5H0HWdQwcPec4Tk16vR79vYFmmjJWJCFgCRVFEEbkX9wOFcqmEMtC/5R/vTCYjYZE/fbFarXk9F6L/SVFUstkMvX4fzbY5evQorVYL27Zpt9sSdgEvckT5jh/fOeXH/cTldjun/Il9tu3gOg4ou1BNXM8HVgKE+PExH2C6roNh+O4sDdPsyJiZuJ64/fn5K1iWTblcwjAMCoUCw8PDrK2tce7cOYrFIqFQiGq16nV+rTM1FaLf75HJZOn1ejQaDTnp0bZtRkZGaDabzM3N8Z73vJfHHnuUM2fOMDMzIzuyMpmMjCMMD4/gODa33norFy9dotkUkDWTychzb9++fVy4cMHbr+Ixu66Lrov44hNPPMEjjzwyMAVRRACHykOk02miUdGl1ul0yGQy3HHHnSQScXq9Pq1mE9MwUBWFSCTqQVmXVDpFo9HkiSce5+rVq2iaJuGpoiiMjo5y5MgRTp8+zdbWloyR+sdoEEz1+/1d4KhqlIolxsfHZHxj8Ji+nF6qDP1G//rbcaNS9Fdzn69UAaAKFCjQ96P3v//9fPnLX8Y0Tc6cOcNb3/pWvv71r3PkyBG+9a1vycvFYjGeeeYZ7r33Xp544gmeffZZPvGJT/Anf/InPP300/yH//Af+KM/+qMXuVDr9Tq//uu/Tq1W4/DhwySTSX7iJ36CT3ziE9x5552EQqLz88qVK0xPT7OwsECxKFzRvp555hn++I//mFZLvHe66667ePjhh7nvvvtoNMSHjB/60If4d//u3+2579OnT/MHf/AHtFotNE1D13Wq1Srf/OY3uXbtGisrK7J6wN/WYrHI5OQkxWKRTqdDrVbj4Ycf5t3vfrc3uKRDvV7nsccek32KgQIFChQoUKAfnF7zKB+IxdPq6hqO6zA7u490OsP6+jqFQp5IJIztOFim+ZJOKR9MKYoqKqYQ0ZxKpeI5nEzZDxWPxykUCoyPj7G8vAQIAJJOpxkdHaNYLBCNRun1et7i32J5eZmdnR0JUizLEnBJ3e2z8h01oj9IZ3x8XLpqlpaW6Ha7TE5OMjc3J+NzruuSLxQYGR6Wl+10OsRiMUqlEplMxiu8DkvIkEwmueWWW1BVlVqtJuN90WiU6elpOp2OhCzlcomrV69y/vx5jhy5BcMwaDQaVGs1DMOkXC6jKALYhMNhhoeHuXZtibW1NYaGhr34ZFi6SdbW1lEUhTvuOEGv16fb1b34nUNX72JZphfTw3MoGd4xcgYcKpp87P56XFEgHI6Qz+fFFD67RyQclqXkkUhEuphERC5EvV6jWq16kNLGskwymQyObVOr1chmc8RiUUBB01Qcx/VgkS67pwYdUSKepkmXjQBF5m53mQuqJqCVD7b8gvRer4/jCHjmO9Nc10VRVfRuF8dxvdu0pVtne3sb13XJ5XKyk8KyLNbW1zmwf798s+zHynq9Hvv27WNycpKNjU2Wl5c9EKWztrZOOp1mcnKCZrMp4WUmk0FRFG/y4o50Bx04cIAXXjgpowaO48i+r8nJSQAOHz5Mv9/n8qVL2LZDLBYjmUwR0kIcOHCApaUlarWaPHcURZHltQsLC3z+859H1/U9UCqfz5PJZojFYgLIdrskEgnuesMbiMdi9Pp9LNui7p2jpmkyPDxMJBLBdV2Mfp+/ePzbrK+vyziFiOEqDA0Pc+z4cbKZDGNj42xsbMhIn98D5cNJf3CCoihEPBdXqVR8kVPqpWCO//vr+6VeKr73SuDU4GvhD9JBdaPHEShQoEAAlUrFm64aoVKp8OyzzwLw1a9+lZ/6qZ/i53/+5/nsZz/LxsYGIF5XvvGNb/Cd73yH++67j9HRUZaWlvizP/szHnvsMarVKp/73Od49NFH99zPuXPnqNVq8naeeeYZdnZ2eOqpp/ixH/sxzp07B0Cn0+E3fuM3eMc73sH4+Djvfve7+fjHPw7A1tYWzzzzDNVqFRAfRC4tLfGWt7yFp59+GsdxOHXqFLOzs3tgVr1e53vf+550QwF86lOf4oEHHmBzc5N2u80TTzyxxz3W6XR4+OGH+emf/ml6vR7/+T//Z8LhMPfccw+dTodTp06xtrbG2972Nmq1Gt/73vde60MTKFCgQIECBbpO3zeYulkJer/fZ2Nzg7HRMcbGRun3DQyjT7FYFIDANDE9MGU7wrFyPZTyo0a+W8pxHDnJrNfrYduW1/0Upt9voKoa73vf+6SbpdVqsbGxwdNPz8sokeu6RCJR+kafaCTC5NSU7FbyF6ThcJiRkRHi8TjZbI7V1RU0TfVcLTXZneBHBbseqIjFYhw7doxiscTx46LTAUTPg2EY1Ot1EokEiUQCxxFdUn4868KFCwwPD8tOHb8YOhwO0263OXLkCO3206iqxj333MMzzzzD8vISmqaysroKrujo6nY7RKNRHMeh3ekQi8W9qXIhEok4IN6Q2c2WB7tcItEo/X4fXe/K/W87IobWarVIeu4qf5paOCxcKaDQ7/fQ9S6RSHRP/BJA00QcK5fLevEunUgkLGNaoktLuHMSiQSZTNqL9bUlEHIcR07p8UvaV1fXhNsmJTohAFoty4OMu46o3TJ9S0Iz/zj6Jemu62BaFn0vguk/Bh82+SX4R44ckbBGSSTo9/ve+YTsPsrn8zKi1u/3JRxqt1oUi0VAvIH2HWOWZTE3N0epVGJ7e5tIJMLU1CTpdJorV67QbsPVqwvoepd4PE673ZYOumKxSLFYpNvtsrW1xbVr1yiXy0Sj0T0AZ2xsjEwmw/DwMCMjI3z1q1+V0cFMJksopDE+MU673WZ9fZ1utyv72nq9HtlMltW1VT796U9LaOW74zKZDPl8XkJfMfnRIORFaQv53d6xZqMpO6GSyaR00a2srHhF68Iplclk5HTMW48fJ5VM4jguuVx2T1zP3w5FUSiVSgwNDcn4ZalYZGRk+GXje4OR1EEQ5RfsDv47eJ0bvQ6+XnAqUKBAgW6m1dVVGbn7zGc+I39vGAZ/9Ed/dMPrfOUrXwHY46Lye6CAG17viSee2PPzV7/6VQA+97nP7fn9ysoKAF/4whfI5/N89rOflX9bXl5meXl5zzb+wR/8gfx5YmKC+++/n9/93d/dA5kqlQqVSmXP/Zw+fZrTp0/Ln7/0pS+9aJv93q1BfepTn5LfV6tVLl269KLrBQoUKFCgQIF+MPqBOKa2t7cJaRoHDx5AVVUMo+85V3aLpy3LYmtry3OBiL85rhjbtgs4RJm0i1jcGYbpTZhry2iXaZq0O20KhTytVovl5RVarSbtdpt2p4PuRdMmJyclONBCIcqlMqFwiDXvTdvuIlbAqZ2d
HdbW1tB1nX379pFMJiUsMk0TTRPOjW63K4uhq9Ua165d4/z5cywsLFCv19G0EOVyiWw2x9WrC6iqWAin02n5pjGVSjM9PcWJEydIpVKEQiHZmTQ5Ocnm5qZ01jSbLfbv38/Zs2fF/nAdRoaHvS6gLrbjMFQuYxgGy8tL5HJ5ud9FDLJJr9cjkUgSj8cw+n2MvoHriilzruvg2A6u49L0oMrVq1eZnZ0VPVq6zs7ODoZhkMlk9kTX/NtQFBGhG3ysXV3HdRzi3iTGwQk5g1Awnc6ge/chbkMll8uRyWRotVqMjY1KSATIiXntdlv2hfm3p6piEp8PLfyoX7/fl44tx3FAUdC8snMBKsW5NzExKd18/lQ/ULwJQwKg+p1mtVqNiYmJPVOGNE3z+jiSnDp1iqmpaRKJBPPz82SzWVZX17h69SpLS0usrq6iaRrHjh3j+PHjJBIJtra22Nnp8oY3vIFer0cmkyGdFq67lZUV8vk8q6urhMNhisUC29vbGH0D2xETAYeGhohEIhw7doxHH31UuvTS6TThsCitj8fjVCoVRkZGZMk/IOHbt771LS5evCjhSigUIpFIyKJzP77qP69bzSZPPvEE+w8c4OCBA2SzWVRV2XOM2+02Y+PjXLl8WcLaWCxGKBQSpf7HjxOLxWQfXDQala42/7jHYjEmJiZk3FZVVUZHR7n11lulC9J/HbkRGLoRKPIhlTvwGvRSzqRXC6H+KuDU4Lbe7IODQIECBXq9VavV5Acer0QrKyv89m//9g9wiwIFChQoUKBAr6deNZh6uQWPbdtsbGwyNzdHMpnEMAx6vR6qqsrFq2GYtFpNrl27xqFDhwiHwwJQeGDqRvcl/iQgRr/f4+LFS8TjMa8I2/aAiHBYpdNpUl5fVDweI51Oy46nTqdDs9mk0RAOpuHhYer1+p6F6Pz8VcD1JswJx0Q0GvXcMbsLzM3NTTY2Nuh0ROwNFEIhzXN02SQSSWKxKOlMhkQiztr6Ov1eDwBFVWk1m54bqMPq6hozMzMyDuZPitM0jeHhYQqFgiw6N0wBeHo9nZGRUdLpNJ1Ol1aryejoKG9/+9tZXl7hs5/7LJubmxQKBXq9nui+6vVRVdGF5E+RE0Bp15mmqA6KCz1dPI59++Y8kOQQDoVwPFeN3+UlirWj3uLb7+hxiUQidLs6rVabZrPB6OiYV1qf2NPnlM1mZTeYiC4m6Ha7NBp1QiFRoprJZMhms7RaLRzHka4t/9goikKj0dgDovzHY5om/X7fmwppS2dMKCSKyv3j7nhOMd855V/Ol6aFALEf/MuEwyGazS5N71gORlJVVaNQKGJZJvF4nBdeeJ5kMsHy8jLZbBbLsjhz5gzPPPMM6+vrPPLII2QyGY4ePSqBZygU4itf+Qr//X//35NKpdjYWEdVVYaHh9nY2KBWq8koXSgUolgq0mw22bdvH4qicPvtt3Pp0iVWVlbkJCL/a3R0lGazyYkTJ1BVlUajwfz8vFwsLC0t8fWvfx3LsiS0icVi5HI5uc/9wn/LtFAG+rtWV1botMXEwNtvv52VlRVyuRwJz3HmT7v0XVLhcJiZmRluOXrUg9mikyoUDqOqu+eK64oI57FjxyRo9GHZLbfcImHZ9a9Tg66oG335DqlBeOTDzBvppSJ9/jYGzqlAgQIFChQoUKBAgQIFemV6zR1TtVqdaDTK+Pi4dEb5i32/U8o0DTY3NzFNy1tcikiX67r4Xef++s31LDiKgjcdzsX0pt7NzExTKpVwXUgk4rLUG8C2RfRva6vCtWvXaDZbmKYh43J+ZKlcLjMzM0Or1UJVVSzLIhqL4g6UY6+urnLt2hK2Y1PI5+l0uyQTSaLRCOXyENPTcVRNQ1VUQiHh1hgfn8C2LQzDQFEU1tbWaLfa2LZFNBolk0rhOg7VahVN08jnc0QiES5evEgsFue22271nEXCMWKaFtPT0+TzeV544SSpdJpmo0kmkyYeT3gLbLj11ttwXFe4rCYmWF1do9/viccViRDK58BFQilVU3CsXSjnui6u42A7Lu12S5TCpzPehLkOzVZTlFmbJrYtok779s1i2w7RaAzFKxHvdDq0Wi36/R7JZAJFEcdosGjen84Xj8dJJBL0ej1SqRS6rhONRtne3pawQFEUUqkUmUyGdruDruvE43FEpFA48gqFgoyU+WXtpmmAoqB63UiRSMTrEnJlj5TjCLeeAtIZNdhP5cMWPz5g25bs2fJ7ifbv3080GsWybPk7P4bqw7iZmVkuX75Mo9FgcXGR7e1tbMdhcXHRi1Yq6LoOQKvVErFL28FxbObnr/L+97+Pn/qpnyIej3PlyhWee+45RkdHuXr1KpFIBFVVvYL7FLFYjMOHD9NqtXjuueekyyyRSBCJRJiZmeH8+fOMjY0RjUZJp9Nsbm7SaDTkc+yrX/kq9Xp998UiFJK3ryiKdFUZhiEjfaK7KumN4nY5efIFJicneeiht/CTP/leMpkMX/rzP2d9bY1EIoFhGCQSCQ4fPkyxVMI0DBRFxXFsTNvGME1sz/3kQx1N0+Tx8KHW3NwcuVxuT8RjsPT8RmDKjwf6UGrw7/K58BJl4zfrm7qZgvLyQD9smpiYIJ/Pv+rrdbtd5ufnfwBbFChQoECBAgUKFOhvml4VmLqZW8p3L1QqW+zbN0soFJKLVjF9z3dLGfR6fUzTIp1OYRgmsZgPAMCP78k+GUVB8SBKKpUGIBKJEAppHDx4kHQ6Ta/XY3t7m8XFRRqNBtvb2y8aJ5xMJr3pYTGvXDxCPB4nGo2ys7ODrvdYXl5G13XGx8a8PqNVdqpV9IqO4zooQCqZopAvMDY2iq7rdLtdFhc3pZMHFBzHlq4NRVGJxWK02y25jyxLAKtwOEK5XCaTyTA0NEy73aHX6/H4E48TiYQ5ceIE0WjIe+xietrO9g6lUpFGo4GmabTbbS9mGCMajZHL57h44QI7O9ukUinK5RKpVEoeI0VRcFxR/q1pKrZlo6oCDrVaLbkYFx1eNmtra9j2MrrnnkIBXJfLl68wPT1FqVRCUVRM05DgUFEUDMOkVqtJl0wmk5FuF9d1aTQbPPHEE4yNjXHnnXeiaZo8ln7PlqZpbG9vs729vec4plJJul1ddiJ985vfxLIs3vnOd8qomK7rckqbfy45jiNdU/7j9MvSBYjSUBThznMc24uOCrjhd2P5x0/E+0LSPeVHBy3LlPfly5+SGAqFyWSyFIsl1tfXefrpp7l69Sr9fn9PT9HGxoYsz/d/V6vVeOyxx4hEIoyOjlIoFBgdHZXdHgcPHCQcEeOuk8kkY2NjJBIJvvjFL8rHH4/HURSFmZkZFhYWuDp/FV3XZVTy8uXL8j4vXbrEs889Kx+DoigSSsFuwbo/pU9VVXRdp9Fo0Ov1GBoakhMFy+Uh8vkcV69e5fCRI4yOjqIoCrlcDl3Xuf32E1QqW5w8eXLAySeOQzgcwnWR8AuQ0T5N04jFYszNzXHgwAG5XYPb7EdGr//ej1wOFp4PnieDcGqw22rwtgdf+15pGfr11wkU6K+7REdg91Vf72Ydb4E
CBQoUKFCgQIECDeo1cUz5i7ZOp0Mul5OTyUyv5NwZKKK2LJPl5SXm5+flAlVEn+SteV+ilHy3/hyOHz9GsSgW5KZp8r3vfY9WqyXjfLsRNUcsXr2+nEg4TDyeIB6PE4/H5H3WajXq9TqNRkOWbhumSTqVYmp6WpRseyXn9XpdlnKvra2xtHQN23GwLdtz0Lhks1ni8RjhSATVizk1my0Mo082m0VRRD9RKp0mHo+jd7soisLU1BSKonDp0kXpHHriiSdIJBIcPXqUUCiEKG4PMzM7w2c/91kURSEajbK8vML2zg6jIyPE4nEef+xxXNchnxfF4L6ryO9E8g4YqqqJ4nnXJZ1O0W6397iDbNvGth3h8nIcQiGNWCzqxdlc9u+fI5PJoGkhWq2mjFKKL1UWvquqOI7CQSPcLqFQiEgkwlve8haSyaSctKYoYjJbv9+XjiVN09jZ2aFSqYgYJQqxeIxEIo6u67TbbR544AGvb0wshFKptJyC54Mo3xG1C6JCXrn+LlQ1DNODFcg+pWg0JuOEgOw282N8IsIpLq9pok9NdlcxECFDwbJM2ZU0N7ePW2+9lUqlsidi6G9LNBolm80yPDxMLpfj6NGj3H///czMzNDpdFhZWWF1dZVyuUyxWCQWixGJRDwYWWZsdIw///Kfo3d1wpEw8XgcTdOYnp6m1Wpx8eJFFFWhXq9z7tw56XgC4fD61re+ha7rsr/Jd5v5Ewp94Ow4DtlcjvW1Ne8YiefV4uIiyWSSv/23/zZ/50MfYnFhkUuXLvF7v/t7hMNhDhzYz4EDB3FchwsXznP+/Hk5fVLx3G3RaFT2mPnboSgKhw8fZm5uTg4h8HvZBqGUfzs3glM+mPKdUrtgUR2AYu6er8FjOfi6d7PfvRI4NbitgQL9dVSn0/EmzwYKFChQoECBAgUK9IPRXwpMXe8i0DSNkZERuXA1DEMUGCsKlmXLiNXVq1exbYeJiQkJXVzPweM4PhwRrh3HseXkvnK5TKlURlHEp7jNZssrJI+gabYstvbjYcV0inRadE11Ol12dnZYWromeowsy4u07UaCotEoqVSKVCqFpmrMzu4jkYgTCoU4ffo0y8vL4vZDGiOjIyTiCUIhDcMw6HsF4gLo2KLTqaujaSrj4+Ok0mlCWghFwYM2Da+vClRVY3l5ieXlZdnp1Ol0eeqpp6QjRFUF0Njc3OT8+fOMjI6K6JItOqrarRb5fAFNFZ1UPizzjtSAG2332DmOTSqVls42XwIAqITDKrF4HNd1BsCAWMyLAm0Ri/M7iPweIB8g6LrO5uYGmjYmY2Y+tOp2u0QiEQzDkLBKLM4VWVLvL9Ydx6HRaLC1tSkhn39927bpdrskEklc14dBAsg5jkO73UYLhYh6oMuHkgJEiaimH8sTMCQiO6oGQZEfQxVTGMX9hEIhGb3zIYd//H35HVeOt21+Kbeu67z7x3+cO++8k9/+7d9mfX1dxC2jUe655x4++MEPUi6XmZiYIJlI0u60WVxc5Pz58+RyOQzDkA4mHxql02mGh0eYnJzg61//OpVKBVVRiarCaTQyMkI4HOb555/f4xTzJ1aKfaewsLDA2bNn98ASv0fMP86iAN5hamqKd73rXbiuy1e+8hVOnjxJvV4nFApxzz338OY3v5l2q0232+XEidupVqucP3+egwcPkMtl+fJXvsL62tqLII7jOITDYRm99M8PvyDd72Hzfz8IAgcB6yCMGuwAu/7r+ml815eg3+j17pVG917LywUKFChQoECBAgUKFCjQ3zS95h1T/kQs0zS9jibLW+yJxf3a2jrVapUDBw4Qi8UkGBILSxGNMoz+DW5ZYXfdpnjRohiGYZBKJUkmU+RyObLZLNVqFcM0ML1pab4zqm8YOLaNFgoRj8WIeV+JhHBduK7jdRP1qdfrdLsdrlyp0emIiJ1l2Vi2RSwWw7Ys1tfXMYw+juOiqrsLdn/BPDIi3C4+9AiFNG/qmwBSut5F0zTW19eIRqPkcjnW1tZYXFzgyOEj1Gp1Gd+anJwCXFZWVhkZGUHv6sQ8J0k2l2N6aorR0VFisTiK4k/JAx9GDcajACKREJomAFCn05cOIjERMCwLvAUsVFBVxeuE8iKAtvjb4MIffJfK4CRGl0qlImGPpmmcOXOGdrvNoUOHmZgYF3DHhVA4LI9xOBz2zh8bVRVxyE6nQ6VSodPpUCwW6ff7xOMJolG8SGPCcyTtuq80TfMm8NmYZh/bcXAG4lzhSJiQtrdc2wecwoHlymhhPB6m1Wp67jMR3TNNUz4u30HkQxI/KphKpahWa4ArI2S6rrO5sUEmk+GXf/mXOX36NP1+n9nZWR588EFZMm4YBqdOnaKyXUHXdRzHkedjIiG6xfypjdPT0wwNDfHtb3+bnZ0dEbNDIRqLUi6XGRoa4tlnn8W2bSKRCOFQWDqtBl1tTz75JN1ud08Hk4jPhiRY8/8dGxtjp1rl2NFj/Nqv/RrVWo0nn3iCv/iLv2Brq8KXvvQlxsbHKRYKmKZ4Po6MDFOt1Thz5gzZTIaR4WFvf5sS7PldX+FwmG63601ERPaM+fBosLB88Ofr4dSgO+r6kvpBAHX9v4Pf36zQ/GZOqaB/KlCgQIECBQoUKFCgQIFeWt83mLpR35RlWXICnw8V/AiNaZo0Gg1qtRpzc3OMj0+g6z0cx96zuBQLSHdgsenfn+PFtUx6vR7NZlNGmKanZ7hw4QKRSIRWq8Xa2qqI8XkLaE0Lkc3mpKPG7x6KRKJyyt/GxjqdTpdOpy0dRKZp0jcMdG9CmuOIninLNNna2pIwQriGHLn9hmFSLJYoFotekXdERst6/T7tVotarc7IyAjtdgvHcckXsrLDaGNzk7X1Ne644w40TePq1QXiiQSNep1Go86BAwdYW1sjmUxx6NBBRkZGCIXCCGeUw25qbxBGDS56FUKhsHT8hMO7UTofsokCbEtCAkVRuXz5AmNjYxQKBekuub5I2t8H2WyWWq2OZQnYsLm56W1niDe84Q0DDilIJBLouo7rTfLb2BCdXVNTUx4w3O1s0nWdXq9HrVYjn8/T7Xa8KXNxOh3xveu6nD9/geHhIUBM/ev3m3KCmxaNog2AKMsSIMq2bfH4NZVIOEwymfBKw1XvHDY8ION4+wQcRwA6cS7YN+w4UlWVYrFA0yuN958fIjqnkE5nuOOOO8hmswAsLCzQbreJRqMkk0kJokzTJBwOk06n5XPBMAyOHz/OoUOHiMVifPvb30bXdTqdDo1Gg1QqxdDQEHNzc5w5c4Z+vy+jcZFIhHgiLre33W5Tr9c5ffr0Hkgy6Fwa7Ofy44a2bbO4uEg8HmP/3Bx97/iomkY8FqfdarG+vo4CVCpbHDp0iFazSTwe5+DBg/R6PS5dukS32/WerwLshsMRLMv0YGkI27al6+tGrqjrfzcInfzz9WZRvet/fiWvga8UQL0aR1TgngoUKFCgQIECBQoUKNCPkr4vMPVSCzd/5Hwmk5FQynVdms0W166JCN3Y2Dj5fA7Am5IH4EqHj6J6DhzAMExMw6Bv9NH1HqbpwS
LPXeE4DpVKhXa7Q6NRp1argte95JeQK4pCJBImHBYPVUwJNNjZWUXXe3Q6HekU8mNplmUDorhcU1Vse3fxu7m5STKZlNPEQLiTVldXCYfDjI6OUi6XOXToIIZhEItFaXc6NFst0qkUqVSaTDrD+Pg4hmFw4MB+4okE2UyGtbV18vkCHzp2lLGxMXRdJ5vNeotyh2azSbfb5cCBAxw/flz0aYkDgus617k8xL8iKqh4cEWVoE+AGItwOIxtW95lFVQVEdcbuE0/wnf06FHZkeQDl8GupsHfW5ZNPp+TJeTPPPMM+/fv58iRI/IymUwGcaRFoXW326Xf75NKpeU+VhSFWCwuwYKqqrTbHarVGo7jUCwW6Xa73mS/JLouIn5TU5MkEkmvq0oUbfvwybZtel5Hku/I8uNwoXAYzbsf/3wRXWkWprnbBSX2m+ijCoX8CX57u6VE75ZKr9dD0zRyuTztdkuWnYtIXxfXdbwuMNFhtbOzTbvVoW/0mZqaotVqkUwmGRoakgXx5XKZXC7H1NQUuVyOZrPJ17/+dTKZDGtra6ysrBKPx5icnOSuu97A9naFkBYik8lgWcLFmEgk9sAaVVG5cuUKOzs7e1xI4XBYXm4QvGWzWfFcc0UPTbVaZWx8nE63y/r6OpFIhLm5/WSzWZaWltnernD06DHuuedetne2UVWVZrOFZYsplpOTfsxSTCI0TZNKpSLdUqFQiMnJSfmcG+yFeinI9Epfv16JW+kvW24ewKlAgQIFChQoUKBAgQIFerFeNZi6mZsgEomwvr5BMpn0ojKi20hVFaanZ1hdXUUL7bqMXNfFsm1RIO7YmIaJYfQ9J4iJbVsSFIm+Kf82Hc/d4qDruowj+dGm7e0dVFWRk916/T7dTpd6vYZhGJ4rZ4NSqUQyKdw6vhvFBy/WwKh5MW3O8bqkBNxotVpEIhEymQzNZlM6rHRdZ3JyklgsjmmaxLzJf6FQmHK5JGCApjE9NcXc3ByJRJIvfvELPPPMs0QiYarVHWZmZiiVSvT7fVZX14jFREdQLpfj0KFDcjFu2zaO7TDYHeVHHgVM2v158G++YrEY+Xye7e1tLMtGVf0o1O6x3gVTCqq6C6F8J5UPnwaBjCilFiXumUyGbrcrgUK1WgVEB5Tfa+UDIuFe68vi693zKoyipjzY5ruQRHG34ziUSiV5HiQSCbrdLrGYKC1veW6dbDZLoVCg0+lKV5+I58Vl/M8/r4z+oDMqRDQqSsXb7RYhr1A/FovR6/Vl5Ew8L1z8Enj/eaJpKrFY3ANQAuboui4nIPrT7XwHVCgUEtMnXYW5kTlZDt/r9Wi32wCMj4+zf/9+0THmOJw/f57nnnuOAwcOsLq6yvLyMtFohGPHjvHQQw+RSCTQNOFia7fa1Bt1CXYHQZPjitvyp2n5MHbPpDzHBcHsSKczYoamAqVyCdu2+aM/+iOef/55Ll28yDve8Q40TaXf73HPvfcQDofZ3Nzkz/7sM2xsiNeJUqnE2NgoqVTaO/9cXFxsy6LrTTO0LAtd1xkaGiKfz0tIOXiO3ujrpV6vBqOnr+Tn6/X9RvVerQI4FShQoECBAgUKFChQoB8FvaZT+cQCWkfXxXQvFxfbsdnZ2WFlZUUCptjoGLrepd/vCVeU54SyLFv2Uq2srDA8POxNdfO7YMBxbHmfpmlJN0osFvPAhritTqdNPJ5A17vydvt9A9HzozEyIiYC9vt9CSpM0yIcDhEOh1lcXBSOHlWlq+ukkkn5GNvttpw0WK1WpesGhBNJ13uoqsKb3/xmGUPyo1N+cXgymeT06dOUy2VUVSWfz1Gr1STA2draotVqYRgGnY4AEnsjS3gQBFBUFAYW0wq8eDmrSFCFAqpXIN/v9z0n0t5pZdJy5d3u6dNnOHLkMMlkEhEFFJP/wuEw7XbHcyY58vJ+J5EoJk8wNDSEYRhcunSJyclJcrkcrVaLRCIh+5DOn7/AAw+8CUVR6PV6RKMxFAWazSa6rlMsFmm3OxKUhUIharWahFMA0WhsIBoIS0tLfP7zn0fTNB588M3ccecdhMIh8M4lv6jeMAzpwhGQM044HBlwQ7mEQiHS6Qy63pWdV37szHcHDkJCHxAmkwkikTDtdlt2Q0UiEXp6D83rHfNdQf7Uv2wuK8HRxMQE8XicXC4ni79d12Vzc5PHHnuMRr3BiTtOSDg6OjrK9PQ09957L8ViEYBEIonjiAEElUqFpaWlPePc/UL2q1evynNt8JyTl/Men+M4JBIJot6kvvkrV1hZWUHXdc6fO4eqqmxvb5NMJqlWa+RyOY4fP86j3/0uTzzxhCil1zT27ZtjerpFKpUil8+TTqU9d6NCo96QhfKKojAzMzMwLOGVwajBDrTByOnguX6jvw3ug8HbutHvB/VyrqpXC5uC3qlAr7fGxsYYHh7m+eefZ2ZmhnQ6zenTpzlw4ACKovCGN7yBz33ucxKcv5ze//7389RTT7G2tvYD3vJAgQIFChQoUKBAPyx6VWDqZhEYQE5e297eplgsoqBgeU6kQqFAtysm4/ldPpblF6T7RcSOVyztMjIyAgh44E/p2wVUu70xoVCIarUqS67Hx8c94CWmltmOjev2UNUw2WyGXq9Hq9WkXm/QbDWp12pYtk0kHKbfF9GpZDKJ67q7ESZHOGeGhoao1+sCXoTDEhJFIhF6vR7hcJjx8QlmZ2e58847yeVyrK+vMzMzw9bWFtVqldHRUYqew2doaIher8fk5CSjo2OsrCzL/qFut7vnjb7f6eM7izQthKapuO71C9+bQSkBS/yeKb87yf/Z75byIZt/eU0LcejQQTKZDMlkkkQiIUveG426vG2/EFu4jFTi8RiKotBqtWWJdSwWp9ls0u8bZLNZLMsinkiQj8UYGhqSk9/EBEfhgqpUKtTrdcrlMqlUkk6nC7iYpkkmk6HValGpVMjnCyiKSiQSlo6subk5/t7f+3tcunSJ9fV1nnn6ae68805vml9Pdj6J6Y7hF03k6/f7MlLoF65f7zrTNO1F7qNBWZZFPB4nEolQrzdkvDEai4qCcg+QplIp8vk8oVCIZDJJLpcjn8+TSqXk/ZimycLCAidPnuTkyZMUCgXm5uZot9sySjo1NcWhQ4fI5/N7ur/E8VSo7lSlW03CTWBtbY2tra0bwpzre5p8mLm0tMTm5qZ0ul25coVWq8W+fftwHIetrS2effZZpqammJ2d4f777wdF4S++/W1M02RlZZmhobKY2AiyZ63f77O+vkZIE3CzWCzKiZ+Dz4mbOTivfwyaV3Lv/ztY7j4Ipa6HUy/noLqZXgpO+bf9l7mdQIH+KhSPx/nVX/1VfuEXfoFf/uVfZnZ2lg996EN8+MMf5r/9t//Ge97zHlZXV2m32zz//POEQiFOnDhBo9GQ3Y/+zxcvXuQtb3kLi4uLOI7DwYMHOXfuHNvb26/3w/wbp1wuJz/oejWybZvLly9jmuYPYKsCBQoUKFCgQIFurNd8Kl8ul+PS5cvkclkUBekkSSSTbG5usr29LUqqk0lcGc3zO6YEfBJo5UaOCDzXl
OvFrFTS6TRD5SGxkN1YHyiVdul2O+AiAYOu95ifv8LW1halUkk4c/p9QloITdt1rpimSSqVkr0/mUyGbDZDKpUimUyytLREv28QiUaYnJxkanKKza1N2q0273jH27nnnnuYnJyi3xcl0Ovr62xvbzMzM8vY2Jh4rB7EqNcbnDx50lt8F0gkEmxubsr9qSgK8XicWq0unTmO4xAOh3EcZe9CHbjR2nUXSg0uspUXdU/50GGPcwq8CGISRQHDMNB1nWq1ugeUhT1Q58M8HxzZXiSyVqsRDoeJRiMiRiY7rBxSqRREo7JgG0R8rN836PV6xOMJvv71r3PixAlvf8TodjvymPnxuGp1B9d1UNW0AD/RqFdEX+SOO+5gfn6e1dVVLl68yG233YZf5u0v+B1HxDX9GKkfY0skEuRyOYrFIhsbG3S7XZrNJolEglhMTIb0p77t7nMRYYvH4ySTSbrdLtFIlEIhL0vQfQDlR0LD4bCMHPogynVc9J7O0tISy8vLLC4usr29jaIoTE9Pk0qlZKw0Go1SKpWYmZkhmUzuAS4Apmly5coVavWaOF9k3DBEPBbn/Pnz3nNnLzTxS+MHzzXTNLl48eIeuOMDxGPHjlEsFjl48BBPPfU9VldXicVjnD17lgcffJB3vuMdWKbJd77zHbrdLleuXOH222/H8jq7bNthfX1DOCCNvues2iddZYPbfiNdD6Suh1LXf70SQPWDUOCeCvTDoGvXrtHv9zl+/DhTU1MoisKxY8cYGhrizJkzZDIZHnroIe666y7+j//j/+CBBx4gHA4zMzPDf/7P/5k3vvGNRCIR+TMI5/Fv/MZvcObMGTRN49vf/vbr/Cj/5qnb7bK6uvqqr+fXBAQKFChQoECBAv1V6jUHU4lEAsMwqNfrZDIZNE0jnc6wvV2h2WyKYnFFodNuE/Xid67nkvKB014w5S/IXOmm8uNStu3Q7XbR9WsA9PsGS0tLItLklVa7jkOn06Hf71OtVun3+wwPD5PJZNnYWEdVFBnpSqVS3jS5Gr1ej0qlIhersViM9fV1ykNDlMtDVCpbDA+PcOzoMUIhTU4Y27dvlrGxcRxHxPtKpZKM6I2Pj4PnIvOLzLt6F8syGR0dEdFBz6kTjyfodNqoqkoqlaJWq5FOZwiFNOqNBtlsFsM0MbwOHsdxZPxusPjc90/tLrT9hbffB+UDKvE3UfqOnDjo92q1220JCCORiAQyfowyGo3KyXHChaQRCmkoiiIBYq/XI5VK0el0hCPKA5I+0Emn055jSmy/D7EMoy/PBRAArVQuYxpiQqOu60QiEbrdLo1GExfIZjKA6KeyLPH4jh+/lcOHDwPIHipQaLWatFotAAnPotG0hGyaplEulyUY9D9JLpfLsucMkI9DuJCEw82yLG655RYqlQqVSkXE9DJZtJBGMpkkm82SyWRkvM93i+3s7LC2tsbS0pJ0E2QzWUZGRpiZmfEAmkE4HJYF7dlslqmpKbndg6DFsiwuXrzIxsbGi6CO4zjsVHc4d+6c3L+DYGawX8pVd59//X6fcDgs44GapvHud7+bBx98ENO0aLVaMmqpaSJ2uVOtMn/lCrlcnvHxcdbW1uRjzWazuK5Ls9Vie7sCCBA6NjbG0NDQnm2+WdxOnHO74CkUCsl/B78fBFODgOpGX38ZvVbT+v4y1wkU6PuVZVl861vf4h//43/M2bNn2dzc5J/+03/KCy+8ICH9f/yP/5EPfvCD7Nu3jze/+c08//zzdDod+fMLL7xAp9NhamoKELD7u9/9Lm95y1tkfDjQayvDMGSnY6BAgQIFChQo0F93/aXA1I1cC5ZlYVsWW1sVb7Kayfb2NteWlpifn8d1RX/S8LBYaGradZ0xuKjKYEzHhygqkUiIeDxOKpUmm81QrVZ58sknJfBIpVIYhoAG0UgE05ucNz8/z9bWFvl8nsnJKVzXodVqY1mWdPoAYmKeFw3zC59FSbTL5uaWmJbWbnP06FESiQQzM9Ooqoptiwl05XKJYrFIrVal0+nQ6XTR9a6cqNbv9+j3+9TrDer1Gt2ujmmYzM7uE6XbhoFtO6ia6nUDuRiGST5fwHVdhoeHMU2TQqHgOa9cunqXdqtFq9Wm2+3smVi420HlR/d2i8N3O3X2ukR8kOX/zodSvV7Pcy/FpVvmvvvul4Xp3W6XVqsl4YyIxoUkWIxEIqiqRq+nk05n6HTacjubzaY81v51/WMSDoflBLxdaAm4rnf7EQAMQzyOer1OrVrFdRzy+fyAi0vDcQR8fOGFF3j66adJpVL82Dt/jHwh/yI44bvsTNNkZGSEeDxOo9GgWhUxuHK5TCaTkdFOPxrnbZyEaKZpUqvV2LdvH+Pj4+i6Ls+7uFeMD2Kq3eLiIktLS6ytraHruuzmOnbsGMlkUh4jH4bV63UajQa5XI6jR4+Sz+df1Lck9o3o9vKdVoMuKh9YPfrooywuLu4BPv7lbNsWBe/sgk8feuVyOVZXVymXy7zrXe/ive99L9lsllarxac+9SnZ33bo4EGKxSKVrS1WVlc5c/q0BEWu69JqtbxuuA5X5+e9iX1N4vE4+/btk0MJBvVSUb1B+OTHM/3zyL/PQUg1GO273jV1/X0N/nz9a+HNuqVupgBOBfrrrm9/+9v82q/9Gv/hP/wHtre3+chHPsJv/uZvAshJp/4HGd/97ncJh8PU63UeffRRxsbGiMVirKys8OSTT3LPPfcQCon/l585c4YHHniAz372s6/vAwwUKFCgQIECBQr0uuo1d0wJ946IH4n4mUY6nZJOqm63y+joKKlUil6vTzjset1G/mILUBQvihUjmUqSSWfIZERheDQaRdOEEyeRSJLPX8SybMrlkgcwHLa3t2m3WyiKSrer024L51G/32d5eUle33f7+IvOfr8nJwYWCgXpalIUhXZblFI3m00WF69x2223ooVCGKZBt9PlyJEj2LbNlStXsGwb13Gl86fdbrOzU0VVFWzbkQ4w27bp9fuYhkE0GqPb7WLbNmNjYyQSCbK5HI16HV3vMjIySjIpup0SiQTxeJxQKEw6naJULGJZNoZpoHe7tNpt2u02ercrp5q5HqVSpWvKhwvKi2CF67o4tkOr1aJeb8iS7Hg8Tjwex7IsxsbGMAxDTtQLhTRM05JOrL0T/cJEIhEJOAyjTzqdEQCv2yUWjVGr1THN3WlwkUgEH0r6UMJ3CPnb6LrsiXeBiJI2Wy3hnHJdisUimqYRiYRx3RCWZXH06FEURaFWq3H6zGmOHTvG8PAw3W6XTqdDt9vFsiw0TSOTyTA0NIRt22xublKriSLvXC4ny9397ifXK8T396evzc1NhoeHSafTdLtdDMOg3W6zsrJCvVZne2dbQr14PMHIyAjZbJZYLCYnRfqAyDBNup0OnY4ogT948CClUolQKES/3/cA4C5YaTQaXLt2Ddd1mZqakovIXq9Ho9FgZ2eHRx55hGvXrsn9PAg8Xhyl3YVetm2TTqcZHx/nve99L29929tIJpLoPVGivri4iGEYxGIx7rjjDqamprh0+TKzMzPMX7kC
IHvGbrnlFkqlEufPnweg1+vhui5zc3Ok0+kbxvMGY3qDUdwbwSj/++vdUtfH+V7OKfXXCQYFcCrQX5VWV1f52Z/9WTm182d/9mc5e/YslmXxv//v/zv1ep0/+7M/wzAMPv/5z3PixAn6/T4rKyv83//3/73n5//n//l/2NraolKpMD09zZkzZ17vhxcoUKBAgQIFChToddZrDqZsryfGdV12dnYoFguoqkYsGhWT81yXdDoDHnzo9XvEYzFi8RjxWMzrc8qSSqUkhFJVTQIPXz50eMtb3sqZM6dZWVmR8TG/60aABgEb/Klq5XKZdrtDq9WUC1LYdelcvnwZ14VkUvQHWbZNv9ej2+1iWhbhUEhCCQC922VmZsYrXxdT/wBQFBR8B5LilbjvLnb7fdFl1G63URTRY1UqzWCaPqTS6VWr7Ozs0Ol0pMNDURSSySSlUolSqUQ2myUeF1PfEvE4iXicQqGI44jIWc/bduHe0jGMvoSHqmrKhb04dg69Xo9ms8Hy8jLNZpNCoSBBUTKZRNM0UqkUmldK7R+S3b4r33GjooVCKAiHT69nYBh92p0OqqIQDkfI5/M0Gg05WXFraxPD6KMo46TTaSKRiIAh4B3bkARG3i6WJfWD8cR8Lken05Hl8aVSaQ+AsCyL22+/XS6Ozp8/j+u4jI2P0e/3vVhdlEhElInH43G2tra4cOECruuSSqVQVZVOpyOLun1Y02w2CYfDojfLU7/f59q1a9x11xs4evQo1WqVy5cvs7q6SqvVolwuceTIEdmP5u9vRVEwTVPCLP/34lwpSchiWdaLYE2/36dSqVCr1YjH4yQSCXq9Hmtr6/T7PelWq1QqbG9vy8flx0IH+4x2YdTu1DsfxjmOw8/+3M9x+2230Wo2WVxYoNfv8+ST36Pb7dLr9cjlcuzbt4+RkREajQZ6t0t5aAi92yWbzTI+Ps7Y2Bjnzp2TMUnTNJmYmGBkZOSGk/P889Z/XtwIQg3+/FJOqZdzS71WPVPXTwq80d+/n/sJ4FSgvwo5jsPJkyflzy+88IL8/sKFCwB7puw9+eST8nvLsvb87Ef32u02i4uLP6AtDhQoUKBAgQIFCvTDpO8bTF0f4xsc1a4oCq7jYNsW1WqVfD5PPJ7wOoRiJJMJ0mnRq5NMJgiFwiSTCaKxGNFIFFX1o2c3vj934D/FYpF7772PanWHb33rW1QqVZaXl1lZWZEwSkQKDTqdDqZpEolEZYRKLFJDTE9PE4/H2N7eZmxsjCtX5tnc3ED1Fr+2bROPxbjzzrsYGhoin8+TL+SJRiIkEokXLeYVFFDwOpw0L4Jle51JJv2+cIQkk0nS6TSZTJpqtcr6xgarK6vMzEx75d0G0WiUQqFAoVAgmUxSLBaJRqMeZLGp12v0+33ZhZVIJInFYsRiAkiIWCAyLtjtdmU3iB9RW19fp9loYpjGnkLzaDRKJBIllUoSCoWkM0iUg+8u3AVAcn0shWWZwgVkO6CA5pWDLy4sMDo6Si6Xx3FsSqUS9XqdVqvt9UTpbG1VUBSFVEr0PPnHejBaNujuURQGnHSidNz/uVqtYtvifhKJhIxeWpbF6Ogo+Xxe9k2FQiGKxaKM2xUKBfJ5UVb+7LPPsrm5STwep91uE41GaTQasjDdV6FQ2PNc8FXZqnDq1EmOHTtGPp/n4MGD1Go1arWaKNL3ertc16XdassScr+HSjjIdocJDLrb/P3gui7tdpt6vS7jkZqmoes6elfHMA0JvjrtDluVLZ55+hk6nQ4g3GfpdJpWq7Wn/Na/z+tjgj6kbNTr/MVffMd7virYjs3CwlV5Hk1PzzA0NCRde67rUi6VqFQqcrvPnj0LXkSw3W5TKBSYnZ2VMPb6IvNByBSJRPbAqJcDUv7kxeuh1M36pf4q4E9Qbh4oUKBAgQIFChQoUKAfRb1qMHWzaVjgdROpKorrovd61Gt1AGKxGEeOHCGfz5NKpWUxtutBB9O0wIvy+eXRwmHEdQv9F0/r8zuZ7r//jfzxH/8Rm5ubXlzNlYAlm81Kd0u/38NxHCYnJ8WiPxolnUrjOA6zs/swTZO1tVWazSaKosi40szMLHfffTe5XFbCKr8Me3ARKyJpLul0GgDLMjEtC8d2sCzT6+IwZVwrFouyublJp9MlEU9w1113ks8XME2TTCZNLBb34niiyD2RSGBZNtFolHgiTjabRVVFH5ToemrKPqFkMkEylSIRjxOLxUgmkxSKRfCKxxVFoVKpsL6+josAGf5Unmgk6sGHJKqqyUigP0Wx1xPRMU0Tpem7YwHF3yOhEIl43AMBAiTde+99OI6DaRpep5ZNLpfDdaHdbqFpGs1mA8exGR1FFsIPumU0TcNxnD1xOUAWcdfrdXm7qqqi6zorKyuMjIyQTqe5ePEi+XyesbFxYp5Lz5emaXLa3tDQEJ1Oh6eeeoorV654Rfu6dBaFw2G63e4eKOkb5nzwNzw8DIDjOqysrNBut5mamiIWi8kJfIZhsLW1RTwe94rdSySTSdmt5e93UXC/CwMH+7e2trZot9v0+31ZKL+zsyNL1U3TpNPuoPdEUXuj3uCFky8wf3V+zz70u9V8N5h/vP3JfKLIXJNOteWVFQrFIol4Ak1TiUSirKwsS5eXqqocP36MWDzOysoKLe85lUwmqdVqdDodFhYWKBaLOI6Ij+ZyOQ4dOiTh6/VxPR86RSIR+XU9lBoEU9eXoL+US+p6OOXrejj1SmDVX8bJFEzrCxQoUKBAgQIFChQo0I+SXrMon+980DQNVVFxVRfTm9a1s7PN+Pi4B1WSFIsRb6G9C7lE/4uIWYlCZCSk2i2T3oUi/gQ/fxHuOA75fI6f+Zmf4Wtf+xqnT5+W7h4BQkwvHqV6USaLcnmI0dER2u2OXHiL2NYm8XgCy7YZGx3lwQcfZHJy0ouW7UKyWDRGOBKWDgx/cWvbNsvLy+i6LnuBFBRyuSytlsXOzg6KqhIOh2g06ti2QygUIpVKUSjkZeyqWt3xgFWH+fkrrK2tSVeUH5uLRMIcPnyEw4cPUSgUKJVK5PLCyeUC/V6PTrtNo15HVVXi8TiZTEYWb2uaxtTUFPfccw+PPvoojXpD7rdMNoNftj3olHFdl/Pnz/PYY4/x9re/g0OHDnqLYj/upRLxnGSidNzxit8NLypmyz4gEaFUKZfLhMMhWq2WF4sUUTS/HNsvtR7srvLBmr9NgCwMr9Vqe4BFp9OhVqsBcPToUXkumKaB0TcIR8LydkslAYYajQZPPfUUq6sCUpqGiW3bVCoVisWihC+D8gvCB4vNB4FBo9Hg7NmzxGIxUqkUx44dI5VKEY/HJTj1I3K+48i//iCcMQyDZlNME/Rh1PWl5767q9frSTdUOpOm2Wzy3NXnuHLlyg1Lu8W0QmQU0gewPgyUUNAF0zC4trjI9PQM4XDIG1G+Jt1b0WiUY8eO0Wo2uXr1qiyPj0Qi9Pt9bNumXC7Lkv1MJsOhQ4eku+16l1QkEiEajcqvQTB1vTvq+gl8N3JJ3QhKDb6m3UjSFRrE7wL9CCidTnPPPffw3HPP0e/
3eeCBB+RUvWQySblcJpFI8Pzzz7/sB1e+FEXhxIkTTE9P8+STT7K+vs7Y2BhvfOMbuXr1Ks899xxjY2PE43GSyeSeGGGgV6Z4PM4999zDI488QjKZ5I477uCRRx4hl8tx8OBBvve97/2l72N0dNSbhtt4DbY4UKBAgQIFCvSjrNcMTPlvSP2CYdd1SWcyHDp0ENOypDNFUVTv8vKauICmKmia2By/M0dc1i9d3gVQ/iLZXzAPTgVSVY2HHnqI8fFxnn32Wba3t2V8rdlsgqIwOjrK9NQUhUJe9kj1ej0ymQzlcokrV65wzz13k8lkJOzxF7XxeFw+Rl/+gtayBHTa3t72YJhFp9NB13WGR4YZGhpme2cHLRQipIUkuAIBATY21mk0miwuLtBoNBgdHWN6eorJyUlcFwzTJBaNYlk2lm1hGgaqqrG4uEClUqHT7UgnVDqd5ujRYxw+fIjp6WkymSyKghclNGTXkj+xbmJigttuu40XXjhJrVajXq9LSDg6OupBFuFgcxyXF144ycLCAuvraxw8eJDr1yOWZdNudzAtE1wIhTTC4YhX2u5HG10cxwYEGBgaGiYUCtFsNgmFQtJJ5Pd5+Yt5vyg8nU7vcU45jsPjjz+OZVncf//99HqiS8nvfKrX62xvb2NZlozHhcNhTFO42NLpNLlcDk3TWF1d5YUXXiCRSDA5OcnW1ha6rqPZGqVSCVGI394Lgwb2gaIocrvl71AIhUMynpdOi6ii67p0O10c7xwffKw+9BQOtR6NeoNOt4NhGHtcgz5AFB1hTer1Oo7jMDU1RSKRoFKpcOHCBRYWFtje3kbXdQni/H3nP49d15VDAbrdrnBMWeI5piqqfB5atiVdTisrywwPD1Ot1ej1dDKZDCdOnMDoG+RyOU6fOcPW5ia6rtNsNtnY2EDTNBnl7Pf7FAoFDh486HWY7S00j3oddf6XD6VuFNu7UcH59VDqlTikXkovB6xe6udAgX4YVSwWeec738nW1hYPPPAA8/PzxONxPvShD3Hp0iXy+TwTExM899xz8jojIyOMjY1x+fJlOp0Oo6OjhMNh1tbWMAyDsbExTpw4wcWLF/mVX/kVPvaxj/HhD3+YL3zhC7z97W8nGo0yNDSEruuMjIwEYOr7kOu6fPjDH+bKlSucOHGCf/tv/y1vfvObede73kWhUGB7W3xgeP78eRqNBnNzcyiKws7ODplMRk5W3L9/P9euXWNpaYnDhw/L193Tp0/zz//5P2dpaYk//dM/JRqNMj8/z9zcHFtbW7Rardd7FwQKFChQoECBfoj0moCpwU9JB+NFuBCJRKRjKZfNksvlvIJjL5bnXU8skIULyF+I+z1GrguOY2NZNrZtyUlwg18+qPJdWwcPHmRoaIizZ89x+vQpr6AaXMcRvUdAu91hdHSMaDRGrVal0aiTSMQpFgve9YeJxaISIPl9QqLgfTde1u/3B/qC+h7QCWPbdbk4tUyTCxfO0261CHlRtFqtzs7ODqZpcfr0adrtlix/jkQiRKJRpqYmZXxtemoKXddlCfbOzg75fJ61tTWazaaMYbVaLXq9Pn/xF98hlUoyNzfH8ePH2bdvH1NTU0xMTpJKpWRpuOu69Ho98vk8xWKBVCrJ5uYGvV6P7e0d8vllSqWyjB2CH7l0JVgTvWC7Ej1QKulUag8k8P/mH8NIJEIoFJbTAcfGxgiFQtRqNQmnfJeNv99PnTrF8vIy73vf++T55evgwYOyTD2RSNBut73joZHP56lWq9RqNVzXJZfLkUgkKBQKEhK1Wi3Onz/P6uoq09PT5HI5Wq2WmCh36RJjY2MSpviP3d+HolZs737wJz9mMhlyuZyM6w2C1cHr+hP4QEDEVqtFq9WSExsbjQbxeHzPpD5d12WvVNubyLizs0O/3+f8+fNsb4upf3I6I0hAM+hAG4xpKoriQcSQ7NwyTdOL6irydnw3Vq/XY3V1lU6ng2VZxONxSqUSlmVx4cJFNjY3aNTrbG5uYpqmnDjod4AND4+wb98siURijzsqFovJaZC+C+1mPVLXT9t7JQ4pH87dqFPqRrG9VwOcXuqyLxe/+37ieQEIC/SD0OLiIufOnQPEFM3NzU0ikQg//uM/zje/+U0uXrxIIpGQl5+bm+Pnfu7nOHXqFD/5kz/Jxz/+cf7P//P/5Mknn+RTn/oUhmGwurrK7/3e7zE7O0uv1+Pw4cNcuHCBJ598ks3NTX7mZ36Gz3zmM3Q6HU6dOvV6PfQfavV6PZ544gkefPBB7rrrLp599lnuv/9+HnzwQX7zN3+Te+65h9nZWf7RP/pH/K//6//Kpz71Kf7iL/6Cxx9/nP/lf/lfePjhh5mfn+fWW2/lf/6f/2f+p//pf+KjH/0ozz77LHfffTcf/ehH5RCOYrHIRz7yEf7JP/kn/G//2//Gr/7qrwZgKlCgQIECBQr0qvSqwNSNbPrX/y4UCpFIxGXJsqLsLrQNQ0AXRVVRHAd/0JeqqXJx77rOHoeU38lkmqYEGpZt43igyB9jPwhKDMOQcbRDhw4yMTHBxsY6i4uLbG1tUa/XqFazzM3Nkc8LUPb4449hGAbj4+Pcc889TE5OCjeVJQrc/TLqwYWsruvUajXxWFEwLVOWlYs0lN+35NLt6nI/bG1tcebsWa4tLuK6LoVCgZ2dbQlYRAl3ieGhIer1Or1ej3Qmw8TEhFdW3Zb9O7qu0263sW1bQhhVVUmnxVQ4yxKxwn7f4OzZc4BLNBrjwIH9HDp0mH37ZhkdHSWTyTA3N8fw8DBbW1tUq1WuXbtGtVqj3W6zf/9+r+Bc7IOhoSFGR0fZv3+/OM6ew03sH4V4PCb7oVwXbNvypgIa2LaDqirS9TIIFXw4pWkaOzs7xONxHEd0a7VaLZLJJPfddx933303AJ1OB0VRZGyuVCp5dVcCtiSTSbl/wuEwY2NjhMNhisWiN9EwLmHewsIC58+fJx6PMzU1RTgcxnEcEokEU1NTqKroMms0GliWdcPniN/9lEqlvD61lNw2/3z1//Vh0GCUrtPpiIhes0Xf6O/peQIBLG3bptlsyi//OtVqlXq9jq7re2Dd4HNz8Hk7GM0c/BrcTt+xpOu6LNi/Huj4Uy8zmQzJZFK6qB577DEJin2o6MPQTke4vkKhEBMTE4yPj0voFI1GZYQnkUjscUdd3yN1s0l7Nyo3v1HJuX/cbqZXCn1ebzj0SqNUgQJ9P/rUpz7Fu971Ltlrd6OpeidOnODb3/42jz32GFNTU4yNjbGzs8N/+k//aU/0eW5ujg9+8IP83u/9HqOjo3LAgz9pdH5+/kW3HejV6atf/Sr/6l/9K1qtFp/4xCf4p//0n9Lr9ahUKpw4cYJqtcr4+DjJZJJKpcKv//qvc//99/P000/zyU9+ko985CPy/6MjIyN0Oh3+43/8jzSbTUqlEufPn+e5557j1KlTLC4u8nf+zt9hfX2dra2t1/uhBwoUKFCgQIF+yPSaRfl8KYpCJpOh5pWeKwqEwmEc2yYcDrG1VSEWi6JpIc8lIya27S6KRbxLlIpbmJaJaZhe3M
qUziXYuwiT0Mqy5fV7vb63uFZFFM4wiMXiRKMxDKNPs9lkZWVFujYikQgTExPMzs7KRbf/r+82GXSoDEIRvxspmUwRjca8BSqypNl1HXZ2qpw7d46rVxdklEpRYHV1zXtMwjEmJsRp4Llv1tfXxcS+NdHdU6tVyWaztNttkskkrVYL13XlAt0/DqFwmLjnUqpUKkSiovcp1NV55JHv8tjjj5NOpSkU8oyMjDI7O8Pk5CSGYZBMiI4lwzA8t5m7ZzH/0EMP8eCDD5JIJFE1lZCEAbsQQAClLqYpFiN+75QoTA+haQJm+W4pwzDk/fnF5ZVKxSus77O5uclQeYhEMkG/3/fAWRVVVbnrrrtuCAVUVZWxvVQqRTKZlF1V3W6XhYUFrl27xrVr1+j1erJ03IcwpmnKaGq5XKbZbO6ZWAfIiGcmkyGbzZJMJuV1BgHQIATZhbWGjN61220JVAfPbdM06Xa70g0lCu5bNJtNarUarVZLXs9/zNdHTX0N3u7LgandPjeHSCRCr9eTQOl6p5Gu67LHLRaLEYlEpDNMAGaLXq+HYRg4toOiKmSzWSYnJ8nlctIZlUqlZPm83x/lO6Nertj8ehjlg86bQambuaReiV4JiHq9YVWgQH9Z+a8H09PT1Go19u3bxze+8Y0bXvb06dN88IMflBNk19fXqdVq0g0McPjwYX7913+dL33pS+zbt4+TJ0/y/ve/n7/1t/4Wt956K5/5zGf+qh7a32hdvnyZZDLJd77zHU6ePMnw8DB/+Id/SCKRYHZ2lqeeeoq3v/3tANJl67qujFrffvvtfPrTn+bee+8FkO+//A9m6vU6P/mTP8nJkyf59Kc/zR/+4R/yD/7BPwgAeaBAgQIFChToVUtxX8E7iGazSTabZWFhYc8EM7jxJ/T1ep0zZ87iug6gSOgSjcVo1BuEQpoAAJEIIU0AGNd1sG0R3/OBlGEYHpQyvOiXIxxVCGTjR5nsAfeUiEiJ26nX66ytrVEsFrl06RJLS0uYpilLobPZHLfffhtnz56VMa+DBw/yt/7W3yKfz8vIXKVSYX5+nna7vSdW5TiOBEG6rg9AF00sqMNhcKFer3HlyjyLiwu0Wi1sWyzWXVzhqZIwQIC8oeFhcKFarYr9Fo0yMTEhFvWuS6vZJBQKYxgC4FiWTSQSJpFI4LquLJX2u7D8BXkoFPbiWIrs6XEcF13vEgqFpVPofe/7Sc6cOcPjjz9OrVZjZmaGBx988EUF0QIAaGiaKgGZDwVisRjpdFq6YkSXlSrPGR8k+v1OiqKQSCRIJpPE42IKYb/fZ2triytXrvDEE0/wwQ9+kGgkSi6fk04q/3KpVEo6kHyXj++6EQXriuhoajTY3t5mc3NT9HJ1OgPOoAjhUIh4Ii7jff6x7fcFyPRjbr4ryu+KEmXqyp7nw/Uw4vronQ83HdsBxV/8CXdZr9ej1WrRbrfRdV064/zI3mA/2Y3AystBlusB1c2+Bp9bpmlKgDYIiwbhkC8fjg26yxRFTOQbHR1leHiYVColgWE8Hr+hM+qlXFIvF9kbdEq9VGzvRvvppfbdS/VQ3exyN/vdzX7/Sv/+Umq1WoyPj9NoNF70mh0okP//9Feq4eFh2u02iqJw7NgxGo0GFy5ceEkAMTU1xcTEhOwvmpyc5Nq1a/Lv5XKZubk5+QHB2bNnyWQyHD9+nOXl5Rs6sQJ9fzpw4ADb29vUajUOHz7M+vo6zWaTo0ePEo/H0XWdhYUFJiYmuHjxIrlcjnQ6zfLyMjMzM4yPj1Ov11laWmJ8fJz5+XnK5bL8IOn222/n7NmzvOc97+Gd73wnf//v/33pMA/0g1Hwuh4oUKBAgf4m6jUHU67rYpomp06dkn1IlmWJzp9kklazhWEaRMJhMpkMoVBIwiXTtMSUNMPAMASYsizTi+45A/fl4rgurrMb+xOwQ0T//CifpmkybpfP57ly5YoEIO12h0Ihz8mTJ1lbW5PROEVR+KVf+iWOHzvO+sYGGxvrmKZJNpuVoApEWXmlUkHXdWKxGI7jEI3GPIeHhm07NJsN1tbX2drcpFqt0el0BvqwLDGVUAHXcQccU5BOZ2i1mtJVdb0jBPxOp10XiA9Kpmdm6HY6zM/PE41GxSS2dBpN02g0Gti2LQq+QyHiXs+PZVm4jotpWeRyWd785jfTaDQ5ffoUG5ubzExP8453vOOGjhN/m1KpFM1mU8KAUChMoZCX0/T80vVerydjnn5kKx5PEIvt7Q5SFIVWq0Vlq8LyyjKf+9zn+cAHfkZCv3w+L8GXYRgUi0Xi8bh02/hxEL9vyY8XNJtNeX74YMd3+USjUaKRKJFoRE4s9N1ThmEQi8ZIZ9IyAui7ouSTyQcaojBqT1eU725qNpt7pujV63VZNO73SXU6Hfr9vozoNRoNut2udBxcD6Bu1ot0M4Ay+Py9EagC9rinBh1U/hS9fr8vz0//uGmqtufxK4rozkqn0wwPDzM6Oir7vfxhAjf6ulGH1PVQ6qUm7d3MHfVyLqmb/e5GP79SEPX9wqlXeplBBWAq0M30asFUoEA3UygU4t5772VhYYHV1dXXe3P+xit4XQ8UKFCgQH8T9ZpH+UC8Sclksui6+NTMj/GIHpo0O9UqjiP6dFKpNJZlSRBlGH36fWPAJWUPLIz9BbR7HZCyJfgYLEQXkSOVXq/HxsYGCwsLjI6OUi6XyefzElb43T7+hLcXXjgpoc7Y2BjLy8tUKhVZBt3tdqVjJRwOk0gkvUlzYkHc6XZYX1tne3tbRq8cxyaVShGJhLEsm53qDo7XueW6AqgJWOLQ7XYlFPE1GKUbjCn5UTgQsOzc2bMAEh740Tj/On4/UTQaw7Ft8vk8Q0NDuK4rHUEiEul1epmmtPe/lITT68Xl54ZhSPdTt9ul0+kQiUTIZrMvctv4biIfXDUaDcKhMPFE3IviCYDmO6D8QvFIJEIymZQl6Lqus7a2xubmJltbW9RqNbrdLoYpImQ+aAFeNL0tFAqhqMJZFY1GSSQSqKpKoVCQ9zGoG0EOP8Lou6JqtdqLInrisTroujiP/Il/uq7TarVoNBq0223hkPMcWsLxFropYLnRz/73g/9e//3eY/niqJ///fW9VOVyWR5bvyDddV1cXEJaSO7DXC5HuVyW+3EwpvdSzqhX4pB6JcXmr3V076X2WRDVCxQo0I+qLMvi0Ucffb03I1CgQIECBQr0Q6wfCJhSFIVMJi2hTyQSoVavo2khoE2r2aRQKGBaFqoqyrH7/R49D6SYholt3xxKgSthlB8VGuzy8b/8eF2lUkFRFK5cuSILlUEUsC4sLNBut9E0jUKhQKGQp1gsyoW3ZVns7OygKArdblc6XhzHkZDFskyq1W22t7fpdrt0u13AJRTSSKdFXMkw+l6Mz0VVRHzRNEz6fQPHsQmFQkSjAn74IMmHET5Q8SHc4L72F+j+JDNVVfdMq/M7j3x3l+u6xOMiamfbNhsbG5TLZcbHxzFNC7/EXEAqG8Mw9wCJQfnH5PqFuQ+b+v2+7JYSoEeTx9AHlj7QCIVCxGIxkskkly5do
lAocPDQQUzTpFgscvvtt5NOpyWcAuj3+1SrVSqVCltbW+zs7NBqtTD6BpZtyX3gb19IE/BpN964G0NLpVIMDQ3JY3p9NG3wdq4HHH7EzZ/O2Ov1difuefK7ohqNBp1OR8I63xXl74sbA8hX/nWj7bv+d4O/v9HPrwRc3QiK+e49/zgOTtIb7Id6qTLzG03Zu5FDajCierPY3qtxj93s9z9sCjpeAgUKFChQoECBAgUK9MOivxSYujGkEPIdJsKtoxGLxkRJsyViVu12m1AohOOIAnDHcTD6fQxDdA45jv0ih4brTVvDdTEMQ04f8xfDzkD8CEBVVNLpNFNT0zIudfr0aWq1GkNDQ0SjUTnuuNls8uY3v5lDhw6xtLREpVIhmUzKDgzfxVSr1QART4zH4/R6fSqViiynVr0ibMexabc7Xo+TLVwlHngwDVO6amzb9hbfGq4jAI8Pl0Q00ZIRqt39sLufBye7+X1L/v7wQY+u6/LxVqtVGYHb2dmhVCpRLpdpNBosLS2RTqc5cGA/gIRT/X7P284QjmPvKdb2t2ewP0r861CtVlleXubYsWMk4gkUVcGyTBmnAxGj8+OG/vQ1RVF44IEHJCTI5/Pk83lmZmYkAFpaWmJzc4udnW0ZdRssZfX3zSBc8iGfD+4SiQTFYnEPjBp0VPn7wP/X/15VxD7v9YWzq1qtysdkWTatVotMRvRTGYZBu92W2+gXl/tw6vpieX9q3Us5f17ODTS4rS8Fqa4HNC8FmK6/nevl/81390UikT0T9AadTYOQadAddb1D6vrL3iiyN1hqfrPY3ks9xkCBAgX6UdAb3/hGyuUyuq7z7LPPsr29/ZKXVVWVN73pTTz++OM3nDo7MzMj3eeqqnL77bdz8uTJPcNA3vnOd/Kd73yHfr8PwL333svZs2dptVryMrFYjDe96U0vWZ5/M01NTRGLxbh06dKrvm6gQIECBQoU6K+3vm8wdTMoBXgRtzj9fg9QyOWyLC0vk0ql0AbcKrrelYtbFwbie450Rg0CqV3wgXQt+UAiHA7LDifHcTAsg0aj4b2h0imXy9xyyy1cunSJdDot+5X8WNjIyIiMJJmmycLCgowFXr58GYCtrS15XRQFVVG9AuwMkUiUWq3GZmUDXe95kAF0XcTTUEBTNZp6g2azKUGEhFORkCyYvn4B7cMVf6E+6JRyXVfG/vy4HgiHTqfToe9NJ/R7mer1OsVikXA4zPa2cHmpqko0GpWxM0VRPOjkyIikcEbtOogGoaE/Pc2/b3+bpqamUBSFXr/nQTpHTsrzIcZgObqiKF7vVNwDlwJw1Wo1nn76aSqVioznOY6DZdsYHoyTrihv6t71rqd4PE4ul2NoaIhCoUA8Hkfxyvld3D1upRvBGtsWgLFWq1GtVul2ux5EdeQ+ERMhLTY3N2k1W3S6HTqdjpyi5xfn+7c96Ih6KcfPK4mmvRr31I0e243cRzcDP4OOvkF30/UAadDhdKPLDzqkrodRL9ch9VL75fqC8+v1SpxTf5UKooCBAgV6raUoCu9973v53Oc+RyqV4ld+5Vf4N//m3zA5OUk2m+Xy5cv0ej0OHDgAwPz8PJ1OB9d1yWazHDhwgOXlZTY3NwFIpVK8//3v57d+67eYnJzk7W9/O+fOnePo0aNUq1VWV1e5//772draot/vc/HiRfn/u2g0yuHDh2m321SrVe655x6++c1vymqFixcvvmxherFY5Fd/9Vd5/vnnJZjyndjb29sMDQ3RbDZlPcGlS5cwDIP9+/cTCoW4dOkSqVRK9ke222327dvH4uKi94Gj0MTEhNymfr/PyMgIkUiE5eXlGwK7QIECBQoUKNBro9ckyncjSKVpYvJeo9FAUUQZdqlYpFKpMDo6imka2LboQQqHw7iuSzQSod/rYVqOB6EGb3/vpDBVUwmHw/R6vQE4ZYvicceRgGpnZ0dGq0B86tdsNllaWmJiYgIQb+ByuZx0K8XjcYy+ISft+ZG37e1tAUtcl3a7g6qqxOMxej1dAhPbFm9cwpGwKBQ3TTKZDENDZdbX11laWsIwjD0AKRKJyH6l6xf+g5ElCdwM40X7XFVUXHadS75rJR6PC3dPr8fm5ibZbJZEPMHOzg65XI5QKCQjZN1ul2QyKSfm+V/+lD9/Ae0Dw1145RKJRMlkMtRqtT2urmQyKY+hYRiyu8svvR6M+fnxvF6vx/b2toznXb16latXr/L000/LKKMPuHy44bu4fBClqhrhsIjnlUolisUiqVRaONO87bMtG0VVUNgLMwYjYIZhiOLyao1GU8Tt/BL7RCIBIPdds9kU8KnTpd1p02w2X7YrahCu3Oj7l/r7K3VMvZTb6ka9TDeCSS8FhAbh6M2A1o3g1I1cVIO/ux5GXX8fN9o/g4/7Rt8P/vvDqABeBQoU6NUqEokwMTFBPp9nZ2cHVVW55ZZbKJVKvPWtb+XLX/4yH/zgB3n++efZ2NjgJ37iJ1haWuJ//B//R55//nn279/Pn/zJn2DbNhcuXOADH/gAxWKRt73tbXzzm9/kF37hF9jZ2eHAgQP8t//238hmsxw/fpxDhw7xmc98hgcffJBKpcIHPvABNjc3SSQS0ik1MzPDz/7sz3Lp0iXe+MY38ju/8zu4rkssFqNUKskptCA+VPr5n/95vvKVr1AsFuXji8fj/IN/8A/42Mc+xt//+3+fL37xi7z3ve/l/PnzPPjgg/z+7/8+t9xyCxMTExw5cgTXdXn729/O7//+7/Pggw+ysLBAOBzmqaeeAuDWW2/lx3/8x7l06RLvfve7+d3f/V0+9rGP8bWvfY3/8l/+SwCmAgUKFChQoB+gfiAdUyAcPrFYTEAeL7aVzeZk9KlYLHhOHHF5f2Hql2Vblj3gkgIfTIlpfMLJ4zst/LibZRmYJtJ947tZ/L8riug8uv3223n88cfpdDoMDQ2RyWRIpzPYtk29XhdT8xxbupdarRa6rhOJiDLnrt4FBJCo1eqsr2+Qz+fI53M4jigvj0QidLs6pmVimgYrKytsbW3KfeM4jnT2+PvHX3j6ETRN02SJOSAjcz4Q8gGH/70Pa2zbxrZE+Xin00HTNGKxmIwsZjIZMpkM3e6AW80DSX5nl23bsjzdB0F+x5UABaqMV4q/OYyPj2NZFp1OB9gtYHddV8KneDxOLBbj/2/vzmPkvuv7jz+/37nPve9d22s7tnftdWzHIcYh5CBOAkloQghJixJCoEBDaVAr2kqVStXCTwgqUKVSUaniDhUCBRICVEDukNiJc/iIr7289z33PfP9fn9/fOf79e5mT8fJEuf9kEY7x3e+38/MrNczr3l/3p9gMIjX60FVHZRKJZLJJIODg0xOTjI9PW2vXmdNOczn83YDfSuMssbudDhxOM/9/lRWVlJTU1Numm727DJXPtQolYwlq5EMwyCbyRKLx4hEInYF2ewpfvl8nnw+TzabtcMn65RIJOyqO+s1nd8raqmwaanwZbnKoMVCrYWqjJarbloohFqoUmmpY80+znIh1WLjWej5WOw5mf9czP43tZKQarFKKgmE
hBDvRMVikaGhIYaGhvjgBz9IW1sbW7ZsIRKJ0N7eztTUFMlkks2bN/Pcc8/hcDhobW1lbGyMRx99dM6+SqUSf/zjH+39/OxnP+Ozn/0sBw8epFQq0djYSCKR4JFHHmH37t20tbXZX1DW1NTw7W9/G03T7NUgd+zYUX7vFbJXaC4WizQ3N3Pbbbdx5MgRHn/8cQDuvvtuwuEwra2tbNiwgWAwaK+4G4vFuOaaa4jFYjQ3N1NZWVmuYg+xceNGWltbKZVKXHLJJfT29vLYY49x8OBBfD4f73vf++ZUS+3du5fHHnuM48eP84//+I9UV1czMjLCD37wgzlTFoUQQghx4b1lwZSiKOWpWp7yB3szEKirq2NkZJRkMkkwGCo38jY/+FlTvfx+P6lUGk0r2U3PDatiqhwQ6Lp5sqbCWQGIFSapqmpPE7IqgKyqH5fLxY033oimaWzevBmfz8fJkyd55plnWL9hA/V1dYC1apuLQMBPOBwmHk+YK+gVS3ZfKCvscrlcFItFYrE4hULefjyKojA5Ocnk5IQdfFhVM1YljWX2B3KrOsra3gohrMDKCqMWCg8AMEA3dPuxW6vzWc9JfX09Pp+PZDJpT4O0PtBbY8rn829oMG9WJp37oG5NucxmczQ0eOjo6CASiTAxMUGpVCIUClFdXU04HMbv99tjz+fzDA+PMDU1Zb85to5jrs5YsJ8D67mxKqqsx202i/fYx6isrMTr9QLnpnnOXpFQVRYOTzRNI51OE41GicViZLNZOwy0QgmrJ5gVPllBVDKZtMe+kqqopQKdlVRKLVU1tVAF1HLh00Ir3C3XWHyhMG2hx7VYNdbsywtdt1ggtdRjt16rtQiR3o5jSjgmhFgtVVVpamoyK9LLPTXdbjcDAwPs3buXiooKTp8+za5du2htbUXXdUZHR2ltbeWGG24gFArxy1/+0g5lXnjhBb7zne/wv//7v6RSKY4cOWKHQ6dOneK6664zv9ibNaXf6qt466234vV6ef7559F1nRMnTtDsRXnhAAA6pklEQVTV1cXg4CBnz561F3Xp6+vjm9/85pzH8bvf/Y7q6mo2btxIRUXFnAVgfve73/G1r32NL3/5y2QyGfbu3cvg4CCDg4O0tbURj8dJJpNUVVVRKpWIRCKoqtln8vDhw+zbt48nnngCXdc5evQoN998M21tbaiqSjQaZXp6WkIpIYQQ4m2gGCtYvimRSFBRUUF/fz/hcBhYfFn52ZczmQyjo6MkEgk72NB1g5nIDMlEgoqKSnw+L8Vy0ONwmB88M5ms2Uw8nUIvh0l6uUrKCp+sn2ZApVMsFuwpaVZfKOvNhMvlIhgMUl1dTW1tLaFQyG6ybU3rymSy/N///RaAjo4OuxF2fX0D4bD55sxcWdBButy43el0lXsgzeBwOClpJbSS2YtJVR2oDpVoecU4q7LLCo6KxXMr3Vmh0uweTbOrTazQyKqYslbfc7s9uFzON3zAn18BZK1+l81kSSQTdg+JzZs3UyqVSKVS9vPlcDj4wAc+gK7rPP3004yPjxMIBPjkJz9JbW2tPTY7ADNfcUKhMHv27LGnPVrT2zRNs5/rdDrNzMyMHUTZb151g5Jm9vWyXr/ZwVwqleKll17ixhtvxOl0EgqF7IbowWAQh8MxJ3hcqEpodhCiKIpdCReLxUgmk/ZKi9bvQyqVKv++6vYb22QyaQdTVn8p6/lYaYi0WIXRUsHOSva10GOd/7gXq1JarjJqJZVbSz0HS41zsdP5BFKrqZJ6sxVTS92+WIi0VLi0XPC02mAqkUjQ2tpKPB63/2YLYbH+TxcXt23bttlBTn9/P/F4nB07dqAoColEgtHRUbZt24ZhGLz++uu0trYyNDREdXU127Zt4+zZswwPD8/Z5+bNmxkZGbFX+e3q6iKZTHLmzBlaW1sZGRkhEAjYq7NOTk7icrno6uoikUjQ09NDY2MjQ0NDrFu3jubmZk6dOmVP21uK1VrB6nvldrvZv38/11xzDV/5ylcolUq0tbXR2trKqVOnyGQy7Ny50/4SKZfLkc/nSSaTNDU1sWHDBrq7u+2m8IqisHHjRurq6nj99dfJZrM0NTUxNDR0wV+bN0P+rgshhLgYnVcwtZJQCszwZWxsjEgkSqGQL0/DMyuaotEYhUKBcDiEx+2hWCricFgVQAq5XI5kMlnu56OjGzrGrGbnVjCl6eeut/oPlTQNZ7mEvKKiwm54OX+Mc8ZuwNDwEM8//zw7d+6kpqYGXTfweNy4PR6ee/Y5EokEqkM1p/W53OWpeDrRaLRcCWQ2vwYzEJqZMfskWf2krOl21tRC60O4FVpZH66tqWrWtEbrvlazcGv1M4/HY09btE6zzX6urFMuZ64kNzMzQygUYtOmTZirDBbJZrMUi0WuvfZaDMPg8ccfZ2pqCo/Hwyc/+UlaWlrmHGPu6oAOLrtsD+3t7aTTaSYmJsjn8uTyOWKxGJOTk6TTadwuNz6/z6y8MqBYKp6raCtpdkBlVW45HA5yuRyvvvoq999/vz09z5pWOLuqDM6FFAsFLvl8/lwfqFmNy2c/P9YqeolEgmQiSTKVtCukrH5mKw2iVlIFtVQQs9JKpIUqo1YSRi3WH2q5sazktNjYF3sMKzmmdXn267zQ+YV+LnV+ucur2Xahy8tdv9xtK7l9PgmmxFIkmBIXA5fLxZ49e+jt7V1yxcGLjfxdF0IIcTF601P5FEV5QzhlcTgc5nQ21fxQZZSn3wH4/T67WscIGHi9PvKFPE6HE8NQcLs9VFU58fl8dn+hbDZrT+3S7SoqAwPsShrr5PF4UVWlXFGlzQqzzMorcxyGHXCVSiUqKiq56qqr7ModXdfNYMplVVfpKKgoKDhdzvJYddxuD06XE1e5Z5RhGExOTjI1NW33kbJCJus5c7lcOB1Ou5eVFVB5vV78fr89jc7pdJb7MXntnl3WFMHZqxJafZCABStlrCDH6XQSCAQIh8OMjY3R399Pe3s7DocDv99v90dyqI5zFUicq76aXdJeKBRwu92oqkqpVKS3t5empiaCwaBdqt/T00MsFsftdlFXV4/DYfamwgDN0FDVc/22VFVFKZlTQK1vRquqqkin0wwMDJTDQn1OtZn1eK3ndXbYMXsagVUVZT0m66TrejkETZFImKslWivopdPpOf3JrNdtJQHMYoHVSoKYxYKcpYKolVRGLTZNbiUh0XKn2c//QiHScmHbYuHe7Otn/81ZSRi10lBqob9pK7XSbVcbLAkhhFhasVjk0KFDaz0MIYQQQlwA5xVMLRVGzd/O5/PhdDjI2VPwdLvqxAydcnZTa7/fTy6Xw1xRTaWurp5YLGoHMVYfH8MwcKgOvLMCDK/Xh9NpPZxzY3M4zn2otVb3s8KNQkGzQ4lIZIaZmRlaW1vtfgxmI+ucfX9dN1CtaXcOpxlSqAr+YgkwcJSDqnQ6zdjYmL0CnLXqoPXh1O/3oyiKHVRZfaOsarR0Oo3L5aKqqsoOqnw+H4DdXLxQKKCVNHRjbhXZ7OfeCiWs1e/cbrcdUoVCIVwuFxMTE4yPj9PY2Gi
PVVEUs6dXOfjTdM0O6qznQtN0Dh8+zK5duwgEAiiKQiQSoa+vj23bthEMBqmrq6O3t5dEIo5hGAQCAXuVPmt8VlBkBWZVVVUEAgG7abmmafa0P03T3hBKWPuyxmWFnclkkkwmY4dYqWQK1WH2qZq/it7sqXpWeGXtf3bT8tVUAq02eFosJFouhJofSC20qt1Cp6VCqdUEUotVLa3ktNAxljru/H1bl5f6udT5ldy+2kDpfAIoCa2EEEIIIYQQ72ZvWfNzi8fjwev1kkym0LQS1jQ3M6gBl8uNVp6+lU6n8fv9FAoFsrksIyPDqKqKz+dDUZRyY2sFw8jj9XppamrC4/GaYVe5CgdzLb9Z2ZR5WS9P8zMbbGsUiwXi8QQzM9NMTk6ZlVuGTlVVNWDYfZxQzClxKNgN2BVVsafTKQrkcjkK+TyaZq4i19PTg67ruFxuXC6nPeVMURQ7lCoUCnZVj1UFlcvlAOZVfnnQNI1MJjNrlbwSpVJxzjQ0wD4GnPuwey6scOB2m9MDrefT7XbT0NDAyMgIbrebYDBoVwm5XW4MzDBN0zR75cBzfbFUrrjiCjvoUhSzguvkyZOoqtlwNZ/P20FJNpslHo/j8XjsMft8PhoaGgiHw3a4pes6ulY+Gbr9WBarmrGm3yWTSVLpFPlcfs4UP+t5MTDsKqh4PG4HUplMZs6qiIqyeOPyxSp8VhI0rSSQWiqMWqj6aaEwaqk+UctVRK0kmJr9PC10fvZ15xNWrTaUWihUWi6IEkKId4OmpiYuv/xyxsfHefnllwHYv38/dXV1PPnkk/aKdIqisHPnTrZu3crBgwcZHBzE6XSyf/9+ampqeOqpp4hGowSDQa699lqy2SzPPPOM/X9nVVUV27dv57nnnrsg477mmms4fPiw3euxq6uLqakpxsfH7W2cTicf+MAH+P3vfz9nERkhhBBCvDO9ZcGUFQpYlTDRaJR83gyj7JX2yqvtnevPpJPJZPB4vDgcZuWQFXwEg8HylLESVVUNVFRUEA6HyWSyFIolVEVBURXKRVHlFelK5POF8vQzA8MATTOrZdLpDKlUiny+gKoqOBwqxaJOJpPG5/OXH4WCQ3WgKCoup9Oc3lYq4VDVctNtlWg0hs/rxeN2Mz09w/DwMJqmEQqF7OAEzJJzv9/cr7VSHGCvVJfNZu0V5qygRtd1u2GntVLd7Molt9uNy+XC7/fbQYLVZyuXy9nNxwuFQrmPVdG+zefz2QFMfX098Xgcv99PPB4nnU6jhlT7NbL2icHsBfnKK/QxZ6pbsVjk2NFjjI+PEw6HaWxsRFHM/k5er5eKigoqKiqorKy0+2NZVVN2mKSAoiqohvkczQ9RrIona2W8YrE4Z4oeMGuKXpJ4PG6HUdbzOb+n12KBzJutelpoP/PPW8dfLHha6LRUZdRqg6iVVEfB0iGUZbkAa7mgynq9l7rPYmNZ6fnFrnsz2wshxJ8Sp9PJ5z73OX71q19x9dVXUygUWL9+PfX19fT09PDFL36Rr371qxQKBXbu3MnNN9/M448/zuc//3m+8Y1vcNVVV1FdXU1/fz9f/OIX+drXvsYDDzzAkSNHaGxs5C/+4i/4/ve/D0A4HGbXrl0cPHiQ5uZmotEoNTU11NTU0NfXR3t7O8PDw0xOTs4Zo/Xew+v1oigKmUyGyspKLr/8cjKZDJlMhpMnT9r9L51OJ1u2bAHg7NmzXHXVVYyPj5NIJOjv78fr9bJ161ampqaIxWL4/X6mp6dpaGggFotRV1dHdXU1p0+fxu1229XZg4ODgPl/z6ZNm+yVmh0OB1u3biUWizE4OGgvnhMMBhkdHWXDhg2cOnWKVCpFa2urve9SqcS2bdvI5XL09fWtaIaBEEII8W73lgVTViijKAqBQACv10sqlUbTSnP6FBm6gW7oOBxOrM95hUIeh8NpV/BkMhn8fj/V1dU0Nzfj9XqZmZmhp6eHRCJBbW0t9fX1FAoF8vm83TDc/MANYFAqaWhaiVI5wHA4VFwuJ263C5fLjdNpBljRaKy8ity5ZuJWlZHfb06nM6cbKiSTSYrFAplMkUgkQiQSoVAoUFNTg9/vJ5PJ2FPzKioqMAyDbDZrBzpghjrFYtF+s1NRUYHT6bTflBXyBQrFgv14rCDCrJwqsmXLFi677DJ7X1aj+HzObDo+MDDA9PS0HcZY/bOKxaI9vQ/MN2TxeBxFMVfriUajFItFXC4XxaIZaBkYjI6M0tjYWH5uVdxuF4VCYc4bL93QmZqaIh6PEwqFaG1txefzEQqG8HjPNaGf37h89pTL+T2kNE0jFosRj8fJ5XJz+kzNDv+y2SyJRMLe1mpcbm1vBRoul8ve90qCqOXCp5UGVotVRs2viLKmd1oh1GIr6S0XSC33WFYbRi0VBq3k52LB1HLXLbWf+eOYf34lLsT2i+3jzQZYEoAJIVZL13VmZmbYtWsXNTU1xONx9u7dy7//+78Tj8e5/PLLaW5utgOehx9+mFOnTrFu3TouvfRSLrvsMr7+9a+TSCR4z3veQ0dHBz6fj9/97nd4vV6+/OUv2+8PwAzC7rrrLqanp0mn09xxxx28/vrrPPDAA/z2t7/lz//8z/mnf/onu8oK4LLLLiOfz3PJJZfgdrt59dVXqaysJBAIsHv3bmpra6mrq6OhoYFTp05x00034Xa77S+empubueSSS9i/fz/f+ta3+PCHP8zY2Bi33norv/3tbzlw4ADf+ta3+PSnP83//d//cf3119PX18cVV1xBT08P99xzDz/+8Y/tYOqGG25g06ZNRKNRdF3nhhtuYGBggM2bN/PUU09x2WWX2e0VwuEwx48f5/3vfz+//vWvue222+x9Hz16lKuvvpoTJ04wMjJiV8MLIYQQYnHnHUxZwdNiZt/m8XgIBkNEo+bqfGajbjMcQjHDqWKxgMvlRlUVUJRyQ3IDvz+AppXshtRer5fq6hoymQzhcJj169cDlMOWUvlDtwNdN+yG4IZhmFP9ALV8u8PpxOVy43Z78HgLdtWNFRCZY1NQVQWXy0k4HCaRSKLrOoVCgomJSVwus2rFmiKm6zrV1dVUVVWTzZrji0ajVFZUoukaqVQKp9NJoVCwV6VTVdWetldRUUGpVCISidjVTtb0PFVV7YBooX5SZvCCfR8r8IjFYui6Xq4uK/dc0s9VQWklDYfT7EOVSCTsSqzR0VGAucFUuZm4FQ41Nzej6zper9eechkKhewg0uppZYUg81dzVBUVHR1QUBTDDo4Mw7CrouLxOENDQ8zMmNVo1mOfnJyksrISgFQqRTwenxNGZbNZOwBdrIppNUHUUmHTcsHVYkHUQlPyVhtGLTfGlQZS1u/RSkKh2T8Xu27+9SsJqRY7v9Lga6nzC41rpfdZSTB0vqGUhE5CiLeC+V6pmkOHDqGqKhs3brT7OSrKuR6PYL5vsL6ssVYJNr/AOzc1vFQq2fdVVfUNXyBdffXVTE5O8rd/+7fs3r2b5557jieffJKWlhYefvhhtm/fjsfjmRNMvfLKK9x///12X8f9+/fzox/9iM7OTh599FEaGh
[... base64-encoded PNG data for the matplotlib figure rendered by plot_images truncated ...]", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "image_paths = []\n", + "for img_path in os.listdir(\"./input_images\"):\n", + " image_paths.append(str(os.path.join(\"./input_images\", img_path)))\n", + "plot_images(image_paths)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.multi_modal_llms.openai import OpenAIMultiModal\n", + "from llama_index.core import SimpleDirectoryReader\n", + "\n", + "# put your local directore here\n", + "image_documents = SimpleDirectoryReader(\"./input_images\").load_data()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The images provided appear to be screenshots from a website showcasing specifications and details of an electric vehicle, likely from a car manufacturer's website. Here is a description of each image:\n", + "\n", + "1. The first image is a comparison chart of two electric vehicle models, one with Rear-Wheel Drive and the other with Long Range AWD (All-Wheel Drive). It lists various specifications such as battery type, acceleration, range, top speed, cargo volume, seating, wheels, and warranty details.\n", + "\n", + "2. The second image shows a cutaway illustration of an electric vehicle, highlighting its structural components. The image points out the rigid structure and impact protection features of the car's design.\n", + "\n", + "3. The third image is similar to the first one, providing a detailed specification list for the Model Y Performance and Long Range AWD versions of an electric vehicle. It includes information on battery, acceleration, range, top speed, drive type, seating, wheels, and warranty.\n", + "\n", + "4. The fourth image displays pricing information for different variants of the Model Y, including potential savings and federal incentives. It also provides an estimated delivery date and allows the user to enter a postal code for more accurate delivery information. The variants listed are Model Y Rear-Wheel Drive, Model Y Long Range, and Model Y Performance, with their respective prices.\n", + "\n", + "5. The fifth image is another specification chart for an electric vehicle with Rear-Wheel Drive and Long Range AWD options. 
It lists the same types of specifications as the first image but with different\n" + ] + } + ], + "source": [ + "response = openai_mm_llm.complete(\n", + " prompt=\"Describe the images as an alternative text\",\n", + " image_documents=image_documents,\n", + ")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plot Images" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import requests\n", + "\n", + "\n", + "def get_wikipedia_images(title):\n", + " response = requests.get(\n", + " \"https://en.wikipedia.org/w/api.php\",\n", + " params={\n", + " \"action\": \"query\",\n", + " \"format\": \"json\",\n", + " \"titles\": title,\n", + " \"prop\": \"imageinfo\",\n", + " \"iiprop\": \"url|dimensions|mime\",\n", + " \"generator\": \"images\",\n", + " \"gimlimit\": \"50\",\n", + " },\n", + " ).json()\n", + " image_urls = []\n", + " for page in response[\"query\"][\"pages\"].values():\n", + " if page[\"imageinfo\"][0][\"url\"].endswith(\".jpg\") or page[\"imageinfo\"][\n", + " 0\n", + " ][\"url\"].endswith(\".png\"):\n", + " image_urls.append(page[\"imageinfo\"][0][\"url\"])\n", + " return image_urls" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download Images and Text from Wikipedia" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "import requests\n", + "import urllib.request\n", + "import time\n", + "\n", + "image_uuid = 0\n", + "# image_metadata_dict stores images metadata including image uuid, filename and path\n", + "image_metadata_dict = {}\n", + "MAX_IMAGES_PER_WIKI = 15\n", + "\n", + "wiki_titles = {\n", + " \"Tesla Model Y\",\n", + " \"Tesla Model X\",\n", + " \"Tesla Model 3\",\n", + " \"Tesla Model S\",\n", + " \"Kia EV6\",\n", + " \"BMW i3\",\n", + " \"Audi e-tron\",\n", + " \"Ford Mustang\",\n", + " \"Porsche Taycan\",\n", + " \"Rivian\",\n", + " \"Polestar\",\n", + "}\n", + "\n", + "data_path = Path(\"mixed_wiki\")\n", + "if not data_path.exists():\n", + " Path.mkdir(data_path)\n", + "\n", + "for title in wiki_titles:\n", + " response = requests.get(\n", + " \"https://en.wikipedia.org/w/api.php\",\n", + " params={\n", + " \"action\": \"query\",\n", + " \"format\": \"json\",\n", + " \"titles\": title,\n", + " \"prop\": \"extracts\",\n", + " \"explaintext\": True,\n", + " },\n", + " ).json()\n", + " page = next(iter(response[\"query\"][\"pages\"].values()))\n", + " wiki_text = page[\"extract\"]\n", + "\n", + " with open(data_path / f\"{title}.txt\", \"w\") as fp:\n", + " fp.write(wiki_text)\n", + "\n", + " images_per_wiki = 0\n", + " try:\n", + " # page_py = wikipedia.page(title)\n", + " list_img_urls = get_wikipedia_images(title)\n", + "\n", + " # print(list_img_urls)\n", + "\n", + " for url in list_img_urls:\n", + " if (\n", + " url.endswith(\".jpg\")\n", + " or url.endswith(\".png\")\n", + " or url.endswith(\".svg\")\n", + " ):\n", + " image_uuid += 1\n", + " # image_file_name = title + \"_\" + url.split(\"/\")[-1]\n", + "\n", + " urllib.request.urlretrieve(\n", + " url, data_path / f\"{image_uuid}.jpg\"\n", + " )\n", + " time.sleep(1)\n", + " images_per_wiki += 1\n", + " # Limit the number of images downloaded per wiki page to 15\n", + " if images_per_wiki > MAX_IMAGES_PER_WIKI:\n", + " break\n", + " except Exception as e:\n", + " print(e)\n", + " print(\n", + " \"Number of images found for Wikipedia page: {} are {}\".format(\n", + 
" title, images_per_wiki\n", + " )\n", + " )\n", + " continue" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup Qdrant client for indexing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.indices import MultiModalVectorStoreIndex\n", + "from llama_index.vector_stores.qdrant import QdrantVectorStore\n", + "from llama_index.core import SimpleDirectoryReader, StorageContext\n", + "import qdrant_client\n", + "\n", + "\n", + "# Create a local Qdrant vector store\n", + "client = qdrant_client.QdrantClient(path=\"qdrant_mm_db\")\n", + "\n", + "text_store = QdrantVectorStore(\n", + " client=client, collection_name=\"text_collection\"\n", + ")\n", + "image_store = QdrantVectorStore(\n", + " client=client, collection_name=\"image_collection\"\n", + ")\n", + "storage_context = StorageContext.from_defaults(\n", + " vector_store=text_store, image_store=image_store\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Images and Text Documents" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.10/dist-packages/PIL/Image.py:3157: DecompressionBombWarning: Image size (101972528 pixels) exceeds limit of 89478485 pixels, could be decompression bomb DOS attack.\n", + " warnings.warn(\n" + ] + } + ], + "source": [ + "# Create the MultiModal index\n", + "documents = SimpleDirectoryReader(\"./mixed_wiki/\").load_data()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ImageDocument(id_='354a7edc-fe0b-4706-9c80-371eacbab684', embedding=None, metadata={'file_path': '/content/mixed_wiki/1.jpg', 'file_name': '1.jpg', 'file_type': 'image/jpeg', 'file_size': 5631170, 'creation_date': '2024-04-27', 'last_modified_date': '2024-04-27'}, excluded_embed_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], excluded_llm_metadata_keys=['file_name', 'file_type', 'file_size', 'creation_date', 'last_modified_date', 'last_accessed_date'], relationships={}, text='', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n', image=None, image_path='/content/mixed_wiki/1.jpg', image_url=None, image_mimetype=None, text_embedding=None)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "documents[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create Multi-Modal Index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|████████████████████████████████████████| 338M/338M [00:02<00:00, 125MiB/s]\n", + "/usr/local/lib/python3.10/dist-packages/PIL/Image.py:3157: DecompressionBombWarning: Image size (101972528 pixels) exceeds limit of 89478485 pixels, could be decompression bomb DOS attack.\n", + " warnings.warn(\n" + ] + } + ], + "source": [ + "index = MultiModalVectorStoreIndex.from_documents(\n", + " documents,\n", + " storage_context=storage_context,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create Query Engine" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import PromptTemplate\n", + "from llama_index.core.query_engine import SimpleMultiModalQueryEngine\n", + "\n", + "qa_tmpl_str = (\n", + " \"Context information is below.\\n\"\n", + " \"---------------------\\n\"\n", + " \"{context_str}\\n\"\n", + " \"---------------------\\n\"\n", + " \"Given the context information and not prior knowledge, \"\n", + " \"answer the query.\\n\"\n", + " \"Query: {query_str}\\n\"\n", + " \"Answer: \"\n", + ")\n", + "qa_tmpl = PromptTemplate(qa_tmpl_str)\n", + "\n", + "query_engine = index.as_query_engine(\n", + " llm=openai_mm_llm, text_qa_template=qa_tmpl\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "query_str = \"Tell me more about the Tesla Model X\"\n", + "response = query_engine.query(query_str)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The Tesla Model X is a mid-size luxury SUV that is fully electric and known for its unique design and advanced technology features. It was first introduced by Tesla, Inc. in 2015 and has since become one of the most recognizable electric vehicles on the market.\n", + "\n", + "One of the most distinctive features of the Model X is its falcon wing doors, which open upwards and allow for easier access to the second and third-row seats. These doors are not only stylish but also functional, as they require less space to open than traditional car doors, making it easier to enter and exit the vehicle in tight parking spaces.\n", + "\n", + "The Model X is built on the same platform as the Tesla Model S sedan but is about 10% heavier and shares approximately 30% of its parts with the Model S. It offers a spacious interior with room for up to seven adults and their luggage, thanks to three rows of seating and additional storage in both the front and rear trunks.\n", + "\n", + "Safety is a key focus for Tesla, and the Model X is equipped with a number of advanced safety features. According to Tesla CEO Elon Musk, the Model X is extremely safe in terms of frontal and side impact crashes and has a very low risk of rollover. It comes standard with Autopilot, Tesla's advanced driver-assistance system, and has an optional full self-driving system. The vehicle also includes a collision avoidance system with autonomous emergency braking and ultrasound detection to steer the car away from potential threats.\n", + "\n", + "In terms of performance, the\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Show sources" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "**Node ID:** 7ba72e19-49ab-4d1e-9163-af051b680935
**Similarity:** 0.9057627702994282
**Text:** The Tesla Model X is a battery electric mid-size luxury crossover SUV built by Tesla, Inc. since 2015. Developed from the full-sized sedan platform of the Tesla Model S, the vehicle notably uses fa...
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/markdown": [ + "**Node ID:** 46ebd206-a399-4ccf-ab8c-f3cbfd1cd5ca
**Similarity:** 0.8963384516104667
**Text:** == Design ==\n", + "\n", + "A series production vehicle was unveiled on September 29, 2015. It has a panoramic windshield. According to Tesla CEO Elon Musk, it is the safest SUV in terms of frontal and side impa...
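The two source panels above are rendered Markdown summaries of the retrieved nodes. For readers reproducing this outside the notebook, a short sketch of how such a summary can be printed, assuming the `response` object from the query above; `source_length=200` is an arbitrary choice.

```python
from llama_index.core.response.notebook_utils import display_source_node

# Render each retrieved source with its node id, similarity score,
# and a truncated preview of the underlying text, as in the panels above.
for source_node in response.source_nodes:
    display_source_node(source_node, source_length=200)
```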
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzYAAADhCAYAAAAanCBLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz96ZNl2ZXdif3OcIc3+uwec0QOkcgJSMxVBaDGJlmc2shutrGlbmuTTJSZTDKT9E3/hb5TJqnbWjSKrCabZHGsgagBKKCABJBIZALIISIzMmP02d98pzPowzn3uSdqTGu1qCLerkJEuKf7e/fde+65e+219trCe+9ZxSpWsYpVrGIVq1jFKlaxir/AIf9DH8AqVrGKVaxiFatYxSpWsYpV/E+NFbBZxSpWsYpVrGIVq1jFKlbxFz5WwGYVq1jFKlaxilWsYhWrWMVf+FgBm1WsYhWrWMUqVrGKVaxiFX/hYwVsVrGKVaxiFatYxSpWsYpV/IWPFbBZxSpWsYpVrGIVq1jFKlbxFz5WwGYVq1jFKlaxilWsYhWrWMVf+FgBm1WsYhWrWMUqVrGKVaxiFX/hQ/95fsg5x+PHjxkMBggh/uc+plWsYhWrWMX/hPDeM51OuXLlClKu6lerWMUqVrGKn474cwGbx48fc/369f+5j2UVq1jFKlbx/8V48OAB165d+w99GKtYxSpWsYpV/P8k/lzAZjAYAPDpz3+RKztrKKUoVR8rUzyQZl3WN7cQwuE9GGP58IP3cM6y0e3y4vMvcPXGVWqpMFojBBjbYBcLxo8f4KqGXpajdIKtG8qqom5q1rMUNTrk9ffe5ec+t81epwThwXkeHlvulJeoRI+Dg2NeeOWTfOrzLyNxOO8RSL71e9/k6GCM845yMWc+HQMCEAgBeHDeg/fnH1bEP7wHAdYZFpMxr7zyaV5+6XkE8OGH73F6ckA3STkZj/hg/zHOe6SWrA0HDAY9MqHZGK7z2Z//eZz0rCWWrXTEj/7ga8zPxpiqxHjNzZ/7r6nTXe6+80N+73d/i87aJkl/AMKSJAlJopBCkOcZu5f22NzZQ6XhvAsBQgiklEilaOqah++9h2kMeIGUgk6vT57lzGZTqrLEx/PjvcfhEVIivMQ6ixASqSSmafDG4cMpChVf78O5+kiEc1nOR9TTRwyHXeq6oGoK0jSjaQyd7jZvfvctaBye8L4be5cwjWU2PsE5x81nnuPKtatMJqckWqG0DMdnPI+fHFPXnp2dLaSW1HWD1glKCRbzGTeeukKv10UqhVaKo6NDntw/4NM/8xm2r1zBWcdoNOKt199kfjbCGIN1jsV8gUTi46fcu3wFISAZroc1nWZcuXyFt370QxaTEcVkjFKSKzdv8cwnPwU+VMURAiUVn7j1PArJYj7jH/2D/ydFMWO4dxnrLe0SE0IgcAyrMc93Nf0EbFEhjcc7R9pLmaY5D2zOxCd4Dy9++mWuPn2LLM/xzqK14ur1y8srUE5nJEVBIj3OOrwrUK7ih9/6HfrPf5n7g1d4UiSspQllY7DekyeSBwvD/YXgdjLmM/kBAkdT1bz2B9+mXBTINEGEm4FiviDJFF/80ufZyDP6SiEEzGYF3fUNnv3MK1gPD55Mef2dQ4z3iFSh8MxGE17/3X+GPbiP9x4pJEmieOmzn+XHb7xBIhx7g4xMeq5tJXzpdhchGqQQCClBCd49gPuzLkIohBQIBELIsP5lWCt4sMZirUFpHb72jiTLGG5u4r3k7OSUumpQSQLxd601LOYLdJJxNtd4uc7W7pDdPYUxDcZZlJAkiaZczLn/5huk8boniaLXlRR1TSfVcT0Yti8NQCuyPCfN+izKcGxSSRazkpwhX/nr/wtqkrCfEdaSiGfc/8R9Flhywcl4ynsf3uPVb3yDs8MDtq/dYG1zgJAp+48eUcwnDHp9kk4X4eHVr/6L5d69ilWsYhWrWMVPQ/y5gE0rP1NJQpKlKKWwQoNI8IBOUtIsQ4ggW1PKkWc9vPPkgz4u7WKTAVIrEikRQpBnEp8NwEqEJyQjUoFz5M6BEGjAjHe5NbhE2U84YAGuwTYVZ36OzIakSqOTlCTL6fV7SOnwDiSKXq/HWVLgvEMnBqUSAHwENgIQMcnnIrYRF/5hBUIlTGdTvve9b7CxuUFRG7auPI2tLVe3rtPfvcV7999DCEPSTVGdDscnZ2S9dXrDIUmW0NcN66nm0qVN7CBhvig5m3rSvIfIO2R5TqJkSNp1AkKhk4Q0VSgpSdJwjpMs/I2ULbJBqvAzQkjyvEstK5z1aJ3Q7fTQSUJuDN57vHcAWDw2JpqCAPCkUngXToSLCRcyACfVvt+FcM5jraWpNMPBBr2uxrka5xVSgFKCLM9QWsf3DeBIJSne1wip0EKRZzn9QRetLc5ZBKCUwntP1Vj2D8Y4oRFIGmfwTqISQZrn9HoZWRpQan+QUhQ5SZYwWNtgfWsrHCOOJEmRiUZ5j1aaUtYRsAEiHJMULia9giRNSLMMqVQAfFIhhULrlDTNwuKJpyhNUrK8g/ISawxSSlItWU813oklbhYCtFfsLCyDaUGSKvJEMZvN6XRybFlQLjxVkkCahfuk26XbDcANr0lTTa/bCcBPgHCWXCuktygpsTZH2AalumTZkO3BGlUi2e5nPJ5U4CVbQ00zq+iuJaxZuLwp8d4xnczQSYpODFK34Fmg04ba1Lzx2g/Z6fcYaoHQCULCF599lv5wgPfQmTjyXh/jPU5JtIe1rQSh+0idgLNIBEopNKBxaAGJEGjleO7qgH7P4UWCEB4pAKmo6gWTMwdeIORPSGE9sRhx4d8RRDohEMw4PTxZ1isg1jEEy8+nPKz3cwZJg+CEbrchSS4hVIKyCq0VUglcHfavJOAMOnnCoJ9gxg1pqjGNResMU1l6eUamU5TOuHLrJtYJtJJUiwXaZ3R6PbQPwMZ7j4jH/JOgpr3nBIJZbcmzHOHDfqB1gk4ypEqQKkHJBJWkaJXQNOYje/cqVrGKVaxiFT8N8ecCNm2EKn9wHPDO4qULyamW9AZdsixHJRqEYrC5jqsNSio2Ll8h7Q+JNXsEIKXAConqrYfX9Q6BOMcXHqyAKi0R3SGFSKhYQ2iBkx6Smp1OF49F6g5r65t08g5gwXskiuH6BsOZxzuL0imNcThvsdbhncNZE/72DhETCiFErOKDjN4K3guKuuZodMLD4yO2NnZoTEK9KNgabtDVKWuDDeblDIunqC1OpdRW4oVCKY3UDqVTkjRFGcmiEeRdff5eQsbqcwAAUgi885jGIFONsxbnHN6FcyUJyEwIgYjsgfceaxzeiXjcACKc8QiC8GIJ5ETU3jvnA4BB0DjbklUXsV4AT0oT2K5wDb3zlFWJlJIsyenqDJOV4AydbofRZIqQkkRrnA/XBQ95mjIzDVprFBKpQq1aI
UiTDOcsWZLicKRJgtKKsqxIshxjPEp5vPMoKXGNwSmB0holJUoqANI0JctznHNkaYaUgizLsErFz6tI85SmqZFC4rHgZThfF85bqKR7jIMmsj0ej/DhvLtA3YRae0yWpYNNmbCDwPjAxhDXtvYWUTWMMcimgcYjvWA+qbAS5sMBDQJTG5yP19w7fLjr4voEL0B4AULiECgUzjlwEo9iOnH4J2c8mT3E6w5PJopSKIzQ3Cvhw1pSWMvPb1oSJXA+rgV7/h4t+EcIlNKotMeskRjnyYAklaATbFwoTgqsSnHWIIUCb5BO0RtuM9m/c2FNeYRwy7vd48i1xNsa4xKSNIJg4UFIKuvieYif+Y9uTOGV2r9j/GRS771fgoePggiPFo5eLhE4lDR44cDbyKgIpAz3agBWge1EOJwzSKlIEslgmFFXhrIoEVoidEaiCUwZElsswHm0M3GNOZa06J8UPu6YIt6nPqzD9n4WQoZ9IKK1lom2TfOnvOgqVrGKVaxiFf9xxscCNkpniKSD1Cm5ykhFSlXXlOWCs5MzpFQhoezkWGvRSqOTBJTEeqKMhGXihAiJpHOBRWjTkmXC4aB2ntpYyrpGSbWssnsBXaVRMiHJcpIkRSm9TGYkksFgyPqWBOHpra3RX1sD2sQ+sg2NoSpLnDEY02CbJkhQTIOzNqArITDWUZUGL8A6ydnZGNcYOkrTWIcp6wjMBN5LpE5xPjBPQgmQQealtaLG00kVnU6GkB7rQwnYIy/kOCKeEYFzHqXCZxcyyOS890hEkOy0cpaYdJ9H+P2Q9IEg/L51Dnx7zsNbCQLb5rzHuwtF8AuAT0q1vFZAYIkQQaqjUhKtGHT6YBsSrUOy7yEVChPQV0hWnUcJQaI0SilUZIWkkCgJWI+SkOmMVEm6WU5jLI00IORSGqe0IkkSZARPOpE4bxAyfD/NErz36CRBKYnOE7oypaoMSOj0cmQZ5HhtSV/QgstYlicAQCGDHDDN8+V/aUF6kCD5pTxNe8FQwlBYdKKAuG7DD/PoVHBWh3frSk0qQAsHUtEojbOe0pRBKohv8eAFOWCULAmQgiDLiiUHIcHUjt29W8weP2RTHFKqlIlIkTLIy5R23NSKazeuc3OwgRCh0CAA6z1CKlz7KQVRQgh146iso0gkXV+hSof14IXAO0dVN5xNZqS5JkHQzA2+sfTWdhnHtSqFQCoZwLkIibgUAuPg9XfHpHrItatyCdSddziZkOQ5TV3jjF3e4xfX5nIte37iHvhotIxly9rg/bK5XguPUgKhJAaP9S7KPCVSiLAfYAKYAJSS4B2pkqSJQmsBpCzKAuEkxbwh66ZYYzAOiumIw8ePubx9AxU/c3unt5/hj7I2Yvl+zjmEkDQ2gCkpBUIowh5qY3EApPiz0NIqVrGKVaxiFf9xxscCNr3hJrq3iUdgm4amKXHGIpOERGmEkJjGUBcjDg4OwMNwfR2d9+j115YJefsQN9aEKrixQKiDttEmNqZxlGUT+gd8E37XB+bHuACW5tM5VVnjnEfKtjrusU1NVSwik2FjUhSkcELKIInRmk6eB9AlLiSzAry1lIs5s+kEIT0OR1WWmAYmkwkCWJQlxlqklNSmorOW0+l2KBclbQoaEhCJkAprTQBPi4KiLhjsWkQSK60t4CMCPnzoA3EB3PyRlM23VejwpdKBVQnnKlalL/bhSBmSo/jL3oOWoV/CRZkY1gWe6kJluw0VZYQhoRfgwt/eg1BQVDOU9EFyU5ZUZU3fB8ahVbaJKP3y8dppHZZgmibMxxV13SAQpJkmiBEtu7sDhJCcjgoGwzV0EiR1VVmSphk6EYynU0azGYtFjVCCNMvodjrgPb1eB5VozNzQmJo07aC0QCUKaSNYW8oPW9gQTzAOITxKBYZCJ5LzM/LR69CCZu89qZbcfP4WnU4fqRXeWUBwsn/Ig8MnjJuG0hpSIMXTF4IEz9yBUwqlBYlWICND41tmaPmG4ftROmhdkG45H9ioa8/fxlYV3jreeP0JmbzKZO8as9Exn76V0e03DIc9Um0RQhPYA7+Uc3kvIkiAprFIqTA2gGecxDiBkAkohYtcrHVQNo7KGXAG6STCCXQ+ROoE6cJ6UkqxWCwCg6UUQmkcgkYp0kEfqR1NWYIA4xyj0ZTRWUGuk7Ce43Vq74WImbnwnXD/+qUurd1Ulp9vua6FRwgVZKDCIqREJ4oGIkvkSZPkwjWOJEv8Q0uJ1wIpHc6DyjRCCkzjcabh6NFjrtzewFtHsZhRlkV475+UiF2gSD/CNPlzFjuwhWCtJdEBzLZg1HkXCxyeP+7lV7GKVaxiFav4aYiPBWzm8wVqFLT/zgdZlLUWZTxlWQagkCRk3Q47e7s4Z+kNN8jyIC8SUSfRVv0DYxJBC7H/wwV5hhNBmmadwSuBUApBbCi2Fu8dZbGIkqTQB6GkQgfFTqj+JwJhm5CGC0+aZTjjA4ByjsZ7jDGxzyZWj1Xop1AR+PSGQ/pr6wghuHz9ZpSHNSwWBbPplGI+YT6bBoCHwpSGuqhpKrOsCgdQo5FCADHJ9w5TB8lNqPhLfKzM4iHAs5bBEeEbXiwlOcskzXucC+SXcw5rzTJBkjIk4W1fQjAKaMFKyzkEwIL3WGeW17oFTB4CM4SIsrdQ0Q+XMhx/C26klExnZ3hvQAm0VvE4QlUf2oQrXOvA9wjqqqIoCs7GE7qZItWaajGndHPG4zHrG+toJZhPj7lyeYDWUNeOxdzwZP8Qay3WwcbuJkIpEAEwpUqB9yRKkqY5/cEQaxtor7PWZN0uUgbwKTlnw4RoZT0tK+AjOBSht4UW9oCOyWQr8/OAUB6nFLLXYzabAIqsP6DxhzTWMrOOwliUCu954B1ZKugrgdQJyZI2i7RM/GcLwNuivvWeMsopcSA9KCHI19bAGc72R4wPE+TNaxzXA5SdsXt5nSwrg8TNeqwy4V6zFiUEjZMtjXC+bgDvmiiFVDgvESqAZR+ZJOthdjqGxRRrDflgg/725cggKNq+Me8MJwdHeN8ykRqkwHjD6VnBtUtrWAvGVEitKMsGU1oWoo6AxZ8zbO3XnAPSloxpz1m4fkHaKYQg73fCyo+g0AuHUpIkCcUEpEDh0UKF+9B5vAySQkGQIAoEaaJJtccYi9YaqQNAQyi80FjrKcuCD995B68Uk/kYqSRJqqOWMFz79n70wi8LAO3nuIiirQ3g0xhDlutzWeKSyXPxI8sVsFnFKlaxilX8VMbHAjZ1WbMoKpzzGOcQQpF3cvqDDdZ3LpFkaWy2Fqh0SlksuHTjKhtba9SVXb5OW9X21iCcQeGRSiCFJGAOi4ngZWNng2eeu4G1DbaJPQ7Whr+9wzvLfDanKMYs5pMAbAhJts413Y2M2jiscygLeIXHolVotlc6mCG0Dfj4kGw0TRPYFWPwXmCNi7IfQZb36A/XuXztOlopnLcUi4LxaMzBwSNmswmmktSVw6EQBMmKEMEdqaprpNJ0+ypIiBy0s1IFMXFuk+dWlec91roliAjV9Cg/QhIwYeg9IZoCyGgoEPpxzmWAQlzIzGM4
53B2qc8570XgwkHEEn7bMyLinwI4OjwilQYpDdZZkjz2JMTejYuvJ5Yl7wDUjDGYxjObzJnZCu8cnU6KFJK6toxOz4JkrywopjOsrahqB1JjjA2fm3N5oxQy9NwogfcC0zQURcV4NAZgsDZESU3e6/Hk/n10krK+vrGspAtBAArtFbnw/QASz6voUghSpUOy2yqAfKj8J1kfRIc0EzihMY3DW4dxHiPAxNexHoQIwD3AvQD+rbMfSWzb8xgPK8rRBJUU4FVgHzzkEI4Jzd23T9DpZc46XWbTKdm8QeucJLVBWuXdck34yIR6zlk+ImCVgCIUCVId16CUqAhefVwbiVI4laKkpp/n5NJz+foz5F/+O2TCkmeKO6/+G5QM72OjUYgX4Lzk/ccLbt3okch2XfpQJLASG8/3xax9uTRbtur8y3P26cKak0q2bTN4goxLSokXgkYE5isNgi6UAlzov5OOwFCKIFcESBON0oY8T8mTDk6nOKnoDHLqsqQ/7HM2mnJ8OgIpmJUVaxu90Eu3pGc+en0/el8K/IVveRflea69ZvL8ui2ZWBHPm2MVq1jFKlaxip+2+FjAxngQSYf+oEev16fTyZcsTdrpxOTCgRMY01DVdWBEEMFsAB9teUPPgm9AOMNiPkPgcNZijInsTkiYdJqQ5UMEGkESZTLnshwHFEWXs6Pj0IOjoGURtvd26G1tBnLDe1ystkJIkNoeiVaF00pPiMyQkqH/I1E6sDhChh4UogWy8wFgycB6SKkQQFEVnI7GHD58hFYqkisSLzRbV26x6G3QGI/zmkJn+PagfFtxPY9WSpZIhfQCKVRszBatIokWiLQiKb/sG4gMRAQ7iPMk3cUEKYnWuMa2zf0/oc1v2QvAGbv8zwFEiGVSLJXC42msQ0qBaQTlwsF2m0W6c0Drg2zKOIsRIdkkJtJKdSjLEofCOVCopVRRasXZ2RQhHUJqdMZyvSRJAhHshqRcLNmEcGrDmnA+JMogKMuSprF4V4dzL88r3UrKeN1CX4jWenmMsc0pJPZekEoV4QiR/bPIfh+ZpRgcMusgHEgfGLHaBdmSFJLz/FOAUsEZUAQQ2xhzAYiG6y0uro7IXHghcVGCqb1H+ZASnx3OeHzXsPX0VR4hmB+fMbh+hcdPGp59ppUpxkQfgYkGFS2YC5bLQS4mnUN6C0JFswFQUqC0WrKKDodwDU0xBTzjx1OKkwfkeY9bTz2P95ZickCS5ShRI+PadN6hCOBiVAi++8YJn3t+SJIEMwMfGbFwa15M38NtwPIIPorWl1JMzsFBYDPa78d7XUCSJsjE4mxwAKydX/4eXqBkYCtlZFcknkxLlAjMlZYparDF1Ap6HYWazxn0O8zKCi0li7KiM+iSd4JrXzzAj/z9EQnkEvd8FMR5D940IPJQVBAi9h6KeLs7nLNLieIqVrGKVaxiFT9N8bGAzcblq+xdux60/4SkpGlMaPA1NULEpl/naeoaU9dRCiSolcAYS1FUGGux1mLqgqNH9zHGRobCo7RGq+C0prUi1ZJExyppBChtdd4D0ju0DLInIQVCxmSFAF6UCBIP337YZc6wFHlFskQA6iONux6L857S1CHJi0nG0cNHPP7wPt1Oh+FwneHGOt3BAO8d1tRIpbi01mEtvc7O9jZKgTVzamEYXn2G4RVHZQTWJ9zfl/iG8wp80Jy1R7ZUnKVa000zJNGaOSbFwkuEihXdtmYbG56FkLQ1eOdCszZK0VgTEn0PYd5NAGlcSIb+SGO2/6OOU221OO/3WFt/DmtqnKmYz06xTU3a8VFupHFUy94mKdV5wo4IbnHdPvlwJ5hOyAqnoiOdimAFyHpdnAqWxx4PUlNWFpBUjWH+4DicO6Ev9B4FidV0OqeqHNYYysUUT8L4ZEqadWNTe8us+XPwjKfb6zOeT8BJmsriXUhwVZRV1VVF4x1eimDIF04OOtGU8zGD3QEWhVksWIzPMHUFSIRQyMh2OKIrnkrwUmE8lFUZQbSPACOwT76Vp/2k1mr5VXT1E5I7bzxB+y1sv4edLeh1etS9Hu89OuLZWx20t0vmIDAC7bUNGCve5pGfiA5eQiyBXaJbSWLIuG3TkOUd8vUthLMo78jzhFRpEm8xpuK97/x77HyEy9KwHiPbQJS7ORT3jwzeTvjciwO8dDjLOahrZWi+dQv7CerRn9/bF3FO+ymFbKVfLTPlQw9VoqMEDdobSiLwLXgHnLXxmEMRwbkgYe10ckSS8eh4jEv75B1NYRzrQuCkwDiDzjTlYkHVqMgw/hnxEx+rZTfx4KyJ93YoxmitSXSCMQ1NY7CujD1dq1jFKlaxilX8dMXHAjbWNJRVjbOhkcX70GMjjaJX12GeQuwHCBKxCZPxGWtrKWfTCuN+ovrogvNQ4+15LtI0JN7T0zr008gAUEQsvYZ+YBfkYYBwAiVUSAyXsvVzdkKK2HTTgoV4CH4pUWldkiIoEOeuRMsq70VmAkETZ6sIOcO5J1jnSNOEQX/A2tqQrZ1NdnYE0/EMd82Q5x3ydB1DD5MofDXBYXEiyocuqGv8uW4mHEGUrSyqmrJuyK1DJnFGR8tGePnRSj4hKesIhYrylcZYvHPR/Speowg0Wih4Md26aIt7/qJiaadMZLgcsH1pj89+/lOBaYtr4vjkiKODJxw/ehSSYaWXrJFS6vx9pODKrZt86a/8MpPpFO9cBLoq9g35YHUNGONwBIDWSm2cDYmxd0Fu6JxjPh6hdEJrdLC2scXLn/sCVW0wJvQ+BSmjjYyBZfT4AcKGBn88S5AupGawscvWpQxrHf3BenT/S6jrYKnrImOGIB6DRVqHxCOFwjqCixUCayxZt0uehVlCIDDRxjfJUnTWDQyd8Witlue7TbLbuUPxEmCMoWmayGAGxzsLTKYN99+Zs7X7FAshqY6e0Es7eOE4bTRHJ56tTR9ZrnBtnbWoyLpqmZAkOvSUWUOmZAQBgqZpEEqTJUGy5VvG08PZ/hOSpkZLSSIF9VmJdA3508/wzg++zfT4PqmO0qkIVpz1WG8i+yPAp9w/dfi3F7z07BDjQn/O8qa8INDy0fjho4YP7X3haWFZ2K+WZvXhp5d9X4Epsx4a16CWv8MSBLml3OtCX4+AxlhM3ZCklrLxCC3JhKAsK2alpqwdzoSeLmfCv/EKL8Jg3HZobfgEgnhS4n605IwQBLbYEe4HKQPATLFgHaKuyLxAVDWeGux5v9wqVrGKVaxiFT8t8fHm2ESpmPAizIOJ8p4sSn6aqqIsC+aLBYeP7zM5O2Xr0i6N2cJYg0efV1iFBGRMiM6rxhKojcGXBbl3dKsmqIQu5NehF8EjfvIAxU+wClIi25krF+ZfXHQlar/t/Hnvyjl78hMvH4GNF+CVJMkyvE+QNqSo06JgWhQ82j+gqmv2drZ45hNP41wW23oVTq/jZB/XVDhT45kGcBNlUIKQBAutUFqTpwmJkCR5BkmCb1vul2xMW3tmaSrgvEc4x5qzpD5h4iXeeeq6Dha17TlopWF8lKFZslYi9PvICyBxibviuXMx0dQCvAw+TVJ
J9i5dJk0z5qNJZJ1CYqhiD5aPjc/tVPd+VyPIlj8nW5e6tocgysIC2Ai1/YuJXyuf8t5zdJCiE728nr1+l5c//zy2pb88Syev0LPk+Oo/e4KZu+U197GnaTad8fyLt/nEp18EgnPZ6GREXYfEMcz3OU+6W8AVBozGGUlopNQkeUaaZ3zqy1/ilMAkSiFxyCidU3R7wQEO79Fao5MIrOPnV/L8+onYfJ8kgAvMjnaeBHj/vUfU8w7+2SGNrcmaKfPihG51hUU24P2HE3a3Upyog8WxkDjr0FrQNI48T9FaBzdBZxGpDmxhNAFpZ1klShEmEDmkgyuXrpDaAD6zTCOdRVBz581XGR/cI8s66EQzXFsnSft08g6DXjBwUElCohN0LGpUOB7Wiu2nNnBWBYtoqaMtuY1A0dBUJaYuqBZjqmJM2wF1Lk27eDd/RNy1XFN5f418uEZVF2Eo6MJGIBPmS+HBWhN+o2VwZADOifIkWU63r0kG/WgfLmhMQ7fXA5khgGpRIl2U1CLj6uPCOhaRNjs/7mXBg1AwaGW1qjUG8Q7hILUGbd3yHjB2xdisYhWrWMUqfvriYw/oFN4i0HSyHBAspjOOTk4oFkWoqtso32pqTKy+LxPw6HQWwgWAIGJjPMuaJeCxzlNUNdOioGwa0jbR9URYE3vuW+bAu3OVSnu8zmGtPa/lXmAgpBQRVLnlXBQRfwbng4tX2zzfSuFbxkGr2PuTAcEVyXu3tFm2ztM0lqPTM0bjaRg0mYbhnI31ODQySVGpwKkab84/Wzg1nuefeobD/SdYnSCVRiYZTiZYnSGkWmr9l59bnAMcPKEnwhh04tE4pA/N6J6QDHvnl9Xvtgm5jdZ1qz0kBxjn8SZOM4/nOEhjfPzZC5VwAmDZ2Ngg0VlgRWxonPdAY0NT/Pm1kLEvJgJQJZbfC9eN894eT3vlw9fL/gQPIqwpJQO4Wn4e2vkeYaZRAH8tZRfOY9ssr9Lg4KekjGDCo1NNooKrVjlf0BhDXTfBpQowNkiEPHHuCpJRLZmWloGQ2NgH1FgLWpMN+/QROHG+HkXs4+l2OssFF5i25jw3h49cJxnnCGkZhpw658FaGmO5+8YJw81bTBONHR3wyZc2+PoP38WdneIvX+HDU8fnTY5S9fm5jICkBbJ1XbOcoQI4JNY70ojAnRdR8hhvZO/p5jkJEi0lWglS6Xl0/w6dLGHjxc+Qpjm9fp9BbxD77TQ6TaIboSRN0yCFkxIpQi/SczcMMg5PJbJwLvaEOe+xxmDrmqqY8v6PX+X0/psoQTS5WJ6tIDlT0WyD831Aak3hPAdPDhC2YjBcR4gMcHGNB/kkEYoIACmoojQtz7tM5iV5f5Okm+JsMAnBCW7cukEjc+qyofqxwVlLolMuRuvI2K7W5Z+tywE+7lXB7MHH9emdw0TGuY6GKxqB9IQ+nFWsYhWrWMUqfsriYwEb5yypUnTznP7aEOEFtqqoqgpZFgh1/nIXG3lVlDu5j5T7L7AFbSU+/GJkL8RS6uajxKe1YJbIZbUd57FKLuVbbSIGIfFTWl+Yyt0yETEZR+CMC0yEcISZESHhDH0K51p8liyDJ9EaKWVwrQKc8Og0RXmPt45MhNkoSklcBGR1U1PWFcWiIEny4CAnggub94EVaVPIJEnoOcOnb1zm1Qf7OKmR1mOcj7r+yFeIIEOz3iOcwAuFlooUT+Y9ddOgckciBf00wTqLk5K820PXFfPplHYIYrgkvlVi4c9PIxDOv63rj3zPtc503sXhoLEGLs7le6ZuYvILxjs6WZes28N6TzGeBne9WJleSnHar0XorfLCLU0S2lx1aVndAiDfdhN5EqVCAnsB7F00lfBeIKwP9roCvAugTCdplJhFVzgcAk+aZqRao4zlbDpjMS2WVXx8K26Kcj7vkTphcOky2WANhMRHM4y826PQ0TVQa1IdBnfa2LTf2knDOWsmIzsQWKuPOl21JgxKCZra4G0AZ/sPjpgeK3ae26D2lssDz1O3h3zvPc98coq6dJmJzDk4tly7fM4AWRvAuZAKY+xSNqiTHEcCQuOEQSUZQjhUkqGUDlfde3CCXpbR6+R084RunnL45DG3bl4ne/7pMExVKbI0QStFVVXxveO1li0b0jruOax1eJWcy9aiDEwHum/ZgwaeATt0Nzf51r89Q5SneBcMSVrGUHgPMsEh414kEA48kmywTqpK6tEh5/I1gRNuabRRNYbahp4cIQWy06MoFqz1N3Gloaoruk1FnneRCqSwXL66STK8wmw+5+DDhxzu76Pi7KZzJlksz8N5i99FPjoC3Wiv71yYm4WSGCFxxlL7cNaMB+nPuaBVrGIVq1jFKn6a4uMxNkDdGJSscaMxnTzn8pUrXLl+De89JyennI7HVE1N635lfRD9OOfP3b/ii7XJ6kXJV5sey2WJngg4PMbZOLFcLnsaWsaApQDlPKkOMz887eC61iJpWSWH6FoW7KZ9rDpfHNTJhX+FeTQClehzFso7Ov0eVz95m/lozNG798PQzyivESo0iXvvkQStfVUvqG1Jt9NbKlACmxDYq0RrauP44PER3gLGUUtH42Lq7oPmnjigL1R8QyqTCkFHBKHLzBlqZ9DW4vwS+2GNIVEKnejl+ZJaRV6D5bU7n+cSzrf7I+YCsaLsoTENF+2CITAAp8fH7Fy/RZKkNI3BGEtVFqxt7XH1qecoi4LLN5+mqQ14sbw23ok48R2QYUCi8w4l2/cW51LDCA58lPFIKaOkKcFag5AW4XzsCWrZMXehrylW4mM/k/N+6ajmfQBKG4mkKyxWCXpaLleFB5yS52s4gvGzyZTCXsJ6UGmCrcIMGNs04DxKSJTUS0vlAMhFlGWK5f3hhYizVNpZT63Mqk3oBa9/90e4Gg4Opjx/a5dHb53SyTep8h5+csYnPrNGZ2jZu7LGO3ePUfffQVx7lrfvj7l6KUGIBi880+kMh6TT7ce1ERjBLO+xtr5OkmYURYnUCiU8eW+AkMEyWStNP8+4uqPo91ISrYKk0q7hXIWtS6RokDikD4N3tZJopZdOg945XGOW7oQ+ug42jVkymi72MLUsUUu62QtDVq+//GXq2QlKCtpxQC3LkySaJNEYazBNcG1MtcKU4Bc1ic4CmI5skLECaw3OePZHBaPTIlwzAWccIYVnrk7odDts7e2QD9cYDgaITofJ7JDDD94C/wFFZVnMThmdnZElCe2NL2KRhSVA+wk56E/sQtaF/VSp0I/kkBg8DaCFDGDdcYEBWsUqVrGKVazipyc+FrCRUix7JJq6pqlrZrMZSZZx7cY19i7vUlvL0dEZ79+RVItFlGe1SWCsJEYgspzB4N3ywe6QwQaW8HR2bWdyDA/YC5VrIQQ2JvvLH/PLP85/Lv4hpEAJuUzMBZLJaEan10XI+DqRFbko/2mBQ5Jp0k4nDJ+M9EExX/DBD95GRB28aQqapgkWxO37LHt6RDuxkvm8oLaeIJMJybESYL1lVBQYBP1uTtE4bNPgo/wmfDwfBnpGcwWlZJDMZDnGWqQHl6bMvUNVZZD66W
BZjfM44cnSDOfDgFOp9fLztgn9MsH04RjjvMvlCW2lPqHpPQ3yoOV5lxTVjLWNNX7pr/9V9i7vLc/tv/of/hV5b51f/NVfClbLURKXJklw3IpXRimJUkE2ZK1BxZE/LTty+uQJ08Mn3HzhZUSizmfmuAtSOSHRStFNUqQLQyRbzaL3nhqLIbJ1IpowxA9pbfCoU77G7T9iNhnDkyPUZIGpGhpjGauc5Np1dq89S0tIWg9FUXFwfMqg16fX7dCYhvn4DCkE/a09dNqPLn5tn1k81mi/3SbWqfeoLI9DLkGmGY2J6986qqLhbGwYjwz3PzxDzBb4I8f29hqFO2RQPSExAw72Lamuqc7u0uy/ycbuDg8mE4r5OnkeXLZmkwl1U+ObJk6zBwhy0iTRdHJFVUNRN9GqOSTnWiqEUpTzEYcffsCZVngvOD45QtqGwfYmqU5wTcPa+iZlBMvdXg+RaGzdUBUlRbEIZgmxsCA9NM5gjY1W6oHJK4oC7z3dTm9ZwLjoWrh37QZCXF/2Ji3XNaB1GljcCG7rqsLYmt3tNZ4a9GjqOY8+/ABbjLEerAhSQ6EUViaQdTEuFEJ2rl1nPpty6eoOiRI0NuNkvM47709x9ia6PiFPJZOzCfNJSbmYUNYVed6JUrhWxnlxd+OPATUsQa1zYTCyj/vFeYHlgn29uFheWMUqVrGKVazipyc+FrDRKkyF1/pcfuI9OOsYjebMpiVJnrGzu82lvS0Wn/wkOk/oJCmKIlRm48Tti7Kj1sK5LUM7F/ot2oRXLgeHnJchl8k3xJ6HOvYAtT8Qa6JR4uSs5fG9BxztH1LP5riqxBqDSnNqFJeeeobhYHhuZOA81pnlEEMIbIrOE7r9Hteu3aSsLHVZUtclTVnHRvHApFjrkDL02wTL3nBQIYmRkR06P7dCCNI0I+v2SFXCrCzDp22r+0qihUA6PmJ+cPF8WOeorFmyTmFWTwChy3PsiVKW0IgeBowC1qKkPmctnEfHnoYgkZLL92iHajoMQkp0kobk0ssLSZnH2AahQkeDEhalE/ACrQXT6RneVXhvl8wEtCScjzLC0IAdWnnapC8c/7uvvcaTew/o5znXP/FSsGGOr9DONln24/jYR4RHiYuZpEAIRaIFw+EOTijybpdOz2FNw3Q8Q9Xwxtde591iRo5gOFwnzwdYk1DJnGatx47Kl15bLkoqVZaCFOwf7LOxvkE/75B1e5iqYveZ5yiXHt3nx9POLDp384trRnp8UzM6OmD/3vsUo1MWkxnzyYLT0zPOTsdMZwuauqG523DJShZxJaYY/uGrhodCMDUN3itUmvLo3/xfSaTin7yj6eYBZBycjjBW0BMS0i4qTVEqwYsZzjWYRlLVhsZ6UgRJmiKVjGvC887b73L86DFbuzukacr3XvseP/8LP4fTMLeG73/rG3zuZ7+AVAll0fDUU0/TyzRHR4dYE1gY6y3nokyJtQ3O1TgX5wo5S1WX4Bxah3UibLjR2yGnYnnmgvxVENz8hACpRQBTCLxp2D98QlHMuPb8X8V1cvJU40cl0llwC5wNv+OROJEgdIKwIFQwl6gbx+h4xNpwQD7oIuQlTs48wlWsJxvMZjVV7DesG4NWmrzbaQWS/GQB5o/Ehf3MOYe1flkoCmYeMuw7IswW8h60ECDkn/yaq1jFKlaxilX8RxofC9gkOiVNQtP8xWq+VAqpNE5q6tpzcjzFVnO8KXnm9lPcunyF7bUtJkXJaDJjXlTUtolGAw7nWpATdPWOOGXcs7SSPZ/BfZ7kthVZ4YPtrTENVrJsBA8/HJim++99yGvfeZOyjmDKGExZYs0ZTVNz990PuXL1CjcuX8Jax3Q6Y1IVOOcoZ/MliBJScvXWdSbjMXUVbHal1qFZXYZBlUprpKgREJy4OJcNtegtNLvLMA8jJrOdTperT9/GCs18OqWu66DVV7HvQJxLtf5oBCmdde7cuS0yEG2PipcgVJDiaKVRSqMSHY5fhq+l0kCwz5YySO9ahuOcSQimDE1dU1QVeb+DlBprapZMnAgzjgbDIUorsiwLx2DDeml7F36yOh2Vh6RK0zTmwmdrJYSe6WjM1/7979PNelx/+ilklgX75vCT8SzLJW7wQqDSfGkJHtarigA7TJt/5oVPYgk9INPRmMV8Tq+7wfbW5dCb4S06UfQGHYbra/SH6xiR8GT/mGI0AmToBYvgzxhDWdRURc3ZtAgA0jaoRKPeuUvjYsV+SRDF3iQZLJSXyb2peXL3LR6/9x6TsxFpkpJlSZCueZjPC8aTMU3dIIRnYg3Oe3oIet5T4XgsoPBh4Kfz4KsCwRgjPO+ftPdKYAN0mpFlJ3ipyDo5RVEitMRMFnQHPSoL61vbZHlGt5cHVjWe+UXTMF8s6BQLxidHXL1xjcZbzg6fkHf7XLpxC4MkMZbBWhfna+7dfZvR2YjtnW3qukEJibUBmI/ORnzwztvMZ6PAaHqHi0NLWfbZRebSB/ZVqsDgKKVIs4wkTeLfGUmSsLa+weWnXkDolLpuuHP3DlubQx588D7ojE5vjbNxQeYkUmc4U8c+KU/T+Oh655Ey5eozn+ZSMkRj6OQJab6JWeTsXRlCOWdyIHj/cUJlOqRZSjpMuboOujP8iULNubDwj7mtA5u4HHAbegfb1hwRQTxCgnTB+AT+lH1iFatYxSpWsYr/eONjARtBtLeVGqUu6P2lDAlZa0Ubp3tXVcNoPOd4NGZe1GRZxrW9bZyJ0g7nmO9sMZ7OmS7mjCczFtWCsmpw3uIcGBu08WHwJktABSxnS1jT4CJT4UJJP7ALjcUag3GO/f1DCgOLypF0g5xnUTUkSoAOk7tPRyOu9HvkOmVUFpSmorO5xmx0Sm8wCFIz5yi8RXZytFLUVYXUiuHeHlVVMDk9w06mZFlIpIQ/B2LBwjgySEvZSEzGY8+AlJqrt56hqSoeP3rEfDYNybiUoeosz+eYXHTUErFK64XEtN8jsGlV1QT2JPxw6C+BpZxFCkGaZWxsbLO+uY2QGt9YmmJO09SYJsgOrTPB8ta37x+mrqtBglaatJfEOTQGay3lomAwXKfT69HaJ3vnqeo6WHqLABjaGSVtv0SWpIERisyT9+c2tkjJB+/foyhKsrTDoqwxxiK1PD8ugt2uVAohNM4bqqqmsYGBE96z/8E9Zicn3H7l04xmBXVjuXztBiqRDDeGzKuaui6pFgWT0QxT1czLBQdHD5md/YDZZMr27i7PvvwyOzvbWAjyQNe64jWMFgXW+CXgRHjSTLE1PuLmCy+gsh4mmkK0ttMizq0R3rKYzXn/rUf86Duv0yzmOCGgJ1FahVvNC8q6oiqrMFtGeBo8lRTMlCQRgqJxFNZE04ko74zJuYcAqm3IkJXWKJXgnCNJUorZHGMM2iUo53ly7wFVVVGcHnP5+lX0U5dQSgQXOSFomhrnBY/ufUg5OQUhObz/ATpLEU6QZh0WZyO297aZ3pkivMV6WMyn3MFRLMrzPiTCekmyhM5gwP07dxBRtipFsGB20bxDqmDWIQgMjnOWdpZVuyhaM5O9K1fZv
foUUqXMZ3OqsiTNdhlceZqGNBQGsmOm4wPqekGSZCxmC3r9fmx9EsGOXCi2rz5Hne2Q4NHeYsdTDu68wf7jh/S0QlhF2fSQ2QCXdkg7kIgGlXTjPK4/KkP746IdMOy9wzR1cDTkvMjhXDBYWbLV4k97tVW04Zzj8ePHDAaDFRBcxSpWsYr/Pw7vPdPplCtXrpwTF39CfCxgE5y9mpAAyTDTBXExsW7L+iCEQus0SpU0vrIUlaMoFrRJlfeWYjpDIdhdW+f67iWSTIOSKDxVE7T/O1t9bNNQ1VWoOkcXKSXBxb6E5YMp9rQEMOGQEt568y3ufXjAaLLAGouXUBYLqrrCqSDZkmnCS1/6Mt3ZiOnDx8yOjjhazNHzGVVRsqgN/c01hJAcnY3odTtYHBZBU9WkTcPapV36uztMRxMmj/dxkwknRyfs7gzjINCW/YizWDycO7VB1VRMJ2O264bB2pDb/R5nZyOm0ylCCHq9PjpNENHKWEq1tNuVKgCerNPBWhetigFMGLQYeyUC0IqOYfFcGS9omjnFomQymnL56k0EnoP9fRpTn0t7luxTSKxknKUznRdUZbmsKmulQvO+0qSDIU1tsO7C+5lgleycCX063iFEMFjodHKaul7O4WgXcLvGvLOMzkZ4J6jqhvlizgd37nDr9u1lT5b3nizPUFJircE2DZPTU5yp6XR7NGXFb/273+PSlRuY4REy7bF+9SqLasbx4xOcC/K944P7NGVBb22bzmCd0taMy4bad+lfuQKdHu/fP+HF291lZZ1YVa+NRZnIOOIiSIOyqPidf/VvyX7733P92efZvnkbkfVC95kQKOGZHO1z/OgD6vmCk8MjivlsaTXu48BVfJz/5C/0U4jQZ2GBRVNTVRUX/itJmqGUDDbOnLMAUgq0StAyyLMcltLUCBXuc+89x8cHOGvpdXOyVHO0/5jX/nDO7OyMte1N9q5cYjY6wZsGW5cIAduXt6nKil6/T5pmbO9dDeegLNm6PkTKcH+X0xllUXByeIgUcPhktlyfu7t7VNG2PdEKfOzBiWYfyxlL8hzk+8jbCUSUabWgWbK5c4lOv09tPKfjMcZ7Ot0+xaJmXpd4a5mNzhAe8l4/bCdlydnZGUmiaXSC88Gau5pX1IsxyjVsY3nw+uvM3/kxspuTba5jhAz3RRlYn7KYIm2BravIwrTS1I+63f1x0fYQWtsgZPs5WZpOtFsvLbO5StT/zHj8+DHXr1//D30Yq1jFKlaxij9nPHjwgGvXrv2pP/OxgE1V1cwXJZKQvCqlwt+RBWgHbgrhlqlF7DuPkigRbYQvmJEKTdUYyqaCRRUTw3Mns0G/j9pZI+8kDPsDpJIoKXHRLaluauazGbPxFOn9slorYjJ5+GSf1159ndHU0DRhzkldVxhTL22oBZD2Bjz94ifJTh/yrR+/hW9qBkpTNsFRy0tP4wLTVBYFxoZZEnmWUcznnBwf099cR2lJZ9hH6qvUkylf//1XmZ2e8vmfeQWVRttrfKw6B/taYv+L8579J48ReY+nn3qKfn/A7u4OnSzn5PSEpmlojCGROjhB2ejkJkA4h20MdR0tclnmOSATdCIR4oI9sw/AwUffaCk0UkKn1wsSNa3YvnyVx48f0TQlSraOXW2LskdgUVLhIzuCbzscPNYapuMxm1vby1ma7XwcACFFkGcBbeaZ5znCQwOB7YrJWztdvpXHNXUd+g2MZTaZ45xCa72UUnX6/cDeWbMsXX/jd36fa7ubbGxt8uN3H/H0Z3+edLDGXGasZ2solXEwn9MkQ86OjnjrW99kPp1y5ZkXUP0h1cRhfZ/B5dtLZV7bxzWtWpBKsK8WkOVd8m4P70IPmHNh4GNdGRqjsTPLu2++zXtvf0h/d4+s0wMpGR88pJlPkT5MtDdNdS5gvNiPE6+u8z5YLkc5Vjt7xRgbRpDG/rD+2gZpmuOdJ+95mqKiLBe0SbW1Bt8Y0ng9vdaxD07gnaGpFyiVovsDjDXM5wVFUXN8cEa33+WVn/k8s5MRvSQh21zDmD7eK7KsQ1k2pGkHqeDg4Ahb13jTLIsbxjiMbUBqkLBz9RJNYzg7PGI2mXL/g/uYpkEIFfq0RDB2iJ7KWO+RKgOikcgF5kaowChLGUzir9x4Cqk0wlhm8wJrYTwp8e/vM6sc08kpXX/AzZt7OOdYLBYMhkOOjg4Zj8YIAziJc4p33nwXr7usZzl33nodc3aM6Wo8NQdHT3DWIBXkabbsGVM6zO4RQUTHcoH+KdFeex8d4VRsQNRSkCqJUZJUCISKPXGwHDy8ij85BoMBAF/92jcZ9AfLjiePQIkLHZ3tdGh3bgID51dOCIFOFd08Dc6XUkT1wgXAGmcVeUccLdDKZc8LFRfDtQULH9jB0D/lorRaLR3JW0MN76NrqG1nuoXnsdKafidHSkFV1jRNO1gbEK2LpAYfZKpChFlVzjkkCtpBxpFJllKQaBXUAM4vC0nhXg6W6h6BkBrnbSxchT3ceY8TsSDhOX+fOEcuDF8OH0zFIk34f79c0EK0FkTRLEO0PxtE7OGceLwQJDKJPw+1acKxuLiHimCtr+O4ACEgiYN1w+U6v85SxnzFi2WRKjCklnAhBFLEHj4hlg+HQIz75Qws40woRnK+uKx1mOAbEz6HC2fc+aBuMPF61mURioFekKcJAsHBwSH/3f/j/47Uml/5y7+ARjGZzvj8Fz7P6ekxHzx+xKvf+D2+8MUvMVzbwkyn/PZv/Sa3P/lZnn3+Obr9DsdHJzzef8JwsMHtW0/z6OCA+WzCr//T/5ayKiirhrqxSCDLNVtbOywKyy/8yt8AG4YiP7r/IfcevMtnvvSLfOtrXyPRCfnaFTqdDr6a8Oj+PZw1ZJ0UKRXGWnZ2t+OcQYc30cbeGxZFGU2JBMY4uoMu4Hn5U5/lf/P3/ndonfA//r//XxwfH3L/wUOm0xGtAZWWYT6fEtAfdJEyFPEaU5PlOaaqSbMcpxXXbl7l+edeZnRY89xzz/Lo0Yek+ZAvfuFT/It/+g9548dvc/XGS2SiJlGWF1/+DI8PjzCmYWNzm7X1DR49eYRxBdPxnCu7Wzy5/wHHJ6dcuXaV/eMTPvPFz3P44T0e3n9E3u+QJ318krK3tcF8OseKhC9++S/xyovPI4Wgrht0quIYEEeiE6T0TKcLQLK2MWB0ckqap/S6nZgfeRaLBRtrA5RQKKlIU43QrVYZvLWB5XexGBqX33mrcRyvIET0wwrn0zmPc+G54n3YB7RSiPBSoX/cWc4OH/FP/vm/5G/93f+K4bCLqea88/Y7bO1s8ep3X0Nrz4PHD5md7ZNQMl0sGJWO3mCbp6/c4PBkn/WNdebTBU/f/iLbos/h+H3e+/AOs8WY6fiMvJNSzWv+1b/99nLf/tPiY8+xMd5SGoO0Fi0EWklSn5F5t9wAhAfT3rjOLqvMbZ/KRS+AAHYE3rdD81qHtHAyaw+1tTStlMqHZChuc0gpGXR7PH39Bnt7e6RaxwviKauKB3ceMOxtUcxOKGbjkPQkgjRJ8E1I/lSWsHf9FhvrQzK5
Q3fYBwQ9pTiuC0SmqRF427qTOabzBUor0p0dpJAsxmOqxZzOoE8nT5DCsr59lSvPP8WPX/0e+/fv84v/yS+QDro03kTWJICAMPwwrjEE82LB3bt3uHrtOuubm/Q31kjyjLqq8Rac8Wit8MiwEEXY6K3zNMZRGxsB23l/DRFYInQ8+Sqe5/AQlEox3NhgY2uXJNWBPel2uXHjFk/2HzKbTi5IwiKwiT0hQoolGeWjIYQS4UFaVlVwbUNg2gcgHr+cmRLOg9YBJC/KEoRHSkgSRZImzOeL2GsVBjYWszk6yUjSHJ0kZGmG0pJcpzjnqcsK3+0uz2hZzChnBd9663V2nnuBp7/wFfJBn8Y0CCFZX9tgPJmQ9gdMHj3g7MGHPPfSZ7j01NPB/c4He900SZb2wzKCLolgLZM4Z1BSYK1lsL7OJ1/5JOs7l9E6xS/XraepK4rRCO8DMGuMwRiHNY6yKOhv7bDodCmriqoqMU0EcdbinUEqHSBlTE6kTpBpB2cCiymcRQiLoIzSR41OMqRKcUKgEhXsrFVwGnMuGCuEOUo+gHgfhj26xgZrZ52Rpl2SLCPNO1R1TdMECqGo50ync773je/gUKAbvGgwxqJEhpSOtfUh6+tD3n3zDUxjMd7R1BYiuNdp1hKZOGdwztDUDbUVkPT59C/+dbKsG653J0dpTZIl5N0clQTWUqssOKdJFV637bNTgdGZF3PuvPYdbjz/IjJNSAg9OE899wJf+MVfgqxLXZd8/xsHiMpydHhEt9OhaWr2n+wzn4xRXrC2vcfVp55B6pROdwOvuxzc/xCxvoYc9rHVAmErtGmQEpQKiV9V17jK0k2TyC75pVLuTyNX2rXjYu+MMSYkc1KQaUWmwtytQaKQxsW+Q49xK/OAPytakNLv9uj1+8t5Zy4my1oEoNwOwrU+9C8qKc7vaQFaShKtydKQSLfDhiEy822y7D3ehj237aGDON8t/ACtJ3+oIYklQAl25SYmJTLIjrF4BFKqUKQTYW+yF6SmAdwkJCrY+eukffa0hxWcGYWPCTviAkiBtolVCBWkxwTpJxBnRFmsP/88YvkaMgyUxYeeT4KMGy/D/oNYWpe31cX2+Y4QJCrsSbatIrkL4weUjCA/AKkWDLXyTo8Ne1prGBKZe+cs3gbQsxwZAWgdjlArtVQhhPPu414SfzYaAUUroQsgV4bXg+jeGJxNrQ8AWcloJyqDLLk1soHQh9oYC1KQ6TTkTwqc9TTGLuf44dYhPsvbWXaDwZD/4//5/8Sdd9/i3XffIE37bO9cwnvBdFbx73779+h2DEVZ8Nwndvm1r/5bDk4eYH/c8OTJPgjH8emH7F66zFe+/Pe4deMGjTP8+je+xvHpKWkWzKKqqsJJwZUr17h182nu3n2ff/Mvfo1rN67RzRRpatBJwqtf+3rI9fAkzBifHVEVcza319FSU5QL6qpB4mmaGqUUxhqwDqE1a90NYIx1NQLFfFaA9aR5wuNHjzk7m/GpV17kC1/+Et/79h/yYP8h63vr9NOce3fvY7wlzVOQMNze5fLuNg/uvcd4bAMYTFIOD4/oDgcs5oYXn/s8D9MnzGc1N29c5fuvvcYHWyn7h/sIX3H//mus79zgr/+n/wWHDx5wsH/IYj5hY20LXztOnzzh1s1r3H/nHn5WUTeOLEvZ3dzi3r33+M43vh7muvVyer0BUirG0zHH3mCt4LNf+Dy3n3mGwWCIkjIMWI4KnLpu6HQyrDGMRjPW1wdopTnYf8za+hrW1ly+fJnHT55QV4abN6+jlY4OuSLmIgEgOneuFmrzL+Kuct67zTkRQQDX1lvOTs443D+hNxywsb5GJ8/DveoFVbng9ddexdqKtc0+1tY0dcLpySlg+b2vfZUHTw64deNpblx/jqJvuf/4x1S1oyM185MjRt0eX/z8Z3jvnbvkUmOtoXt1wIs3XuTKtT2+/eo3qcsZtqmQSfqRfftPi48FbIiDGK2xuLhhO+uojCXNF6TehQb0+CA4l1u1VWaWlR4Ig+SMc2GqPSxP7lJHJcLslMpYVCyAiVi5D9WvwNA0xnB4ckpn0CdLk7CZSokzDT2V8JmXXyT/Yp/ecIuyKCjnE2azOWejYL/qvWd35xI9KRmsD9ne3cMMSmaLBanJGZUFk7JmPl1ABGpaSXxlGFX7CKVwxnLw/gdsX9oNC0cITCejt73G5WducnLvEX/wtVe5dvMqN27fBBEqsIKLtgihx0KJsOQePXxIWZbsXbpEp9NlbXODxpql7ItoFyyEi1VpgVAe2S5O7xEXQUhc+DIyXs4FsOrwDAdD1vo9Mt+gHTid4jzkecKtm09RFCWjyZhplA1Z18TXdUv5VXuRhQDTGJwUJHkeDCKMYTaeUsyLYDndWL75e9/imdtPM1gf0ul0WNgapTVV1eBdg0KSJBLnDR6P0oqmbpiOFvSHm2R5RtrtsP/4kOc/+dyS0bJNs9z8jw6e8Nu//hs8fDDis7/8N7n+4nMkWbhBvO8F5ss4uomiOLzP0Ftu/PxX6K6v47ynGE3DdZIChEFJT5ar6H0X9UHCxophOAFShEpgogS6JVJE2EqklrhUIoQCklB19MS+pAwhtsI9IM9tENpqpo9Wv9Y0lGVFWRacno0ZjRfM5guKosTWJa5ZIKUmy/Nw3CistVhnqWMioSBU1HBYY3DWheZzqXAmMG5CQG+4w87eTdY2tzDWkGUpTVFjAOcFzoQkQkjB7rWrpP1NDh7eo9PJcMZRLaZ0u4o3vvdjlE64/uwnuPTUU+hOTt7NQy9amoZ5Pc4GVtV5Prh7F6kSrj33HLqT4U0AXN45nHHUjcU5gzVlcB2bVzR1TVXWOFNjqjLIEI3FmIaiqEgag0Qxn8ypqoYHd+/iveG3Tg7I8i4CmBwdUVcF8/EJWZqS5h3mRcHk5ICbt26SZwllWeB9wVq/h8Tw3AtXUNkzaJ2EIkcxxTpHWdUszg5Isj7jkxPef+tN1jo9vNTne9yFu/+PbLfL/qBze3pjTDBHkIJEaVKlKQHpPJkMEl7hPfUf+4qr+JPC+0iCLBPssJG1sj7r3LnET8rIONggtY49b85ZPBbdGrBIEUenhcReRoORdm4TbaLOudQw5tLL56aQKtybHryMA4vbp0bIeIP5jo/PgPh5XJStOsAYS9M0WGsDo0AAKkt7cO+XjnqRH2ztBGnXZlsVFyIU1sI34zMmDrz13iKRkZmxAY55h42FN7wIhc7IDIlYSHHxWQYBPEgZK8cRQHkbXscLUDKwNo42yXeBM5EBDIDAuCBBV7DMQWRrsyo8tt1L4/5tbFCBWAdCCawztLPzhLBB+omIleoAfCVhbIJv54+159HF67+UJculU+aySh6ZKiC6eUbHR9qe0rbn1iFVUEp4GUBjYNRiMSoRXL15g04359LlbZ69/RLeGr7627+DTxL+zl/7a3zta7+Fa0p+41//Ovv7HyCk4/hon7KsWVvvM5+eUK4PGAx7TOYFxkw5PHyX/qBLUZRcvbZH8/gh83nBfFHx/R+8wXw+pz/oUczn5PkGZ+MRGxsb1NU
xTV1TC0+a5lzaucFsus/p2RkIFxwhhxl1UzMejcMaV5JBv8+1a1c5PT5l0O9xcLigroKLrp3PKSvF2u4VLl/d43vffpVf/yf/mPfuvc/2pT26nZx6XgRWApDekmc9vMo5GU+ZlyVpr0NRNRSLkhs3nqKTZFy7/jwnZxXdwTrldMwzTz/DWz9+g7t33mVWzBBaIa2kmI350fdfZXz4kLPRgq2tXY6PD/jhm6/jXcFvv/tWmE1YzDG24RO3n+Hd9+6QZxprayChbhq6rmEyHVGWFUKn/NyX/wpf+covkiZJZPEV48mYPEvJOxlVXeK85cc/eJMnh/v83Je/RJ6nXLt2lbPTMx49fkK/3+f+hw/4/Oc+F0cA+HAfWU9tXOxHD8VXLozq8HEMRqiJx/vHRyMl7+P9FBKXfr/H7//7f8l3f/Aj/u7/8u+xvbXB9tYmznvKsuBrX/tNHh3u8yt/478kSRI++OB9fvur/4rGGySWeTHiO9/fp5flDDoVZ2cledpnMi1xTnB6esIf/sG3uXXlOuPRCWmiQVi++4ff5vDwkJ1LezSm4fjkMZf2rv659/OPN6DTR0cs75EuPjyFQPpgyytriZMGpySmaTDGMBtPePTgkO7aOm3T/Pnr8ZHKzvKGDmcfD1SV4f4HZ7FiH6nndoynByWDlXLdwJ33j+j3M/p5Qp5pmqLg7Xc+ZDyZ44H1zU1cVSCFJ00TLl26xO7ONjJSyEdv/4hZpumvrzMTY9y8pJ7VJN4xcNCRmjTvkuUZaE3a7SC0Do3u1iClYIiishYrBbIyHPzoPRbFnH6nx4cfPOD48ITj/ROuXr/Gxs5mTG6i6xkSFV3LVKT7R+MRdVNz6fIVdvKM7bUBZSdjspjSGBNkDcTs2UcZAoAXS1YBwuacJzq4RKUpwbrZUpcVVd0wnYxpphOsFHQl6M0d5PoWPsowsjzjUucSe3sC2xjKuqRYzCmLgiTNllWA1na2qkqMFTRCc3g254MPn9DtbQZbaN1FqoKN3WepbZfR2HN6OsM0Dd4ZqmLB6OQE24QmFWNM6LmJD+Xu+mW6Wyl5lrO1u8snX3kOLxQ6lagkZTAckGjB3R+8wfe+/QYbV17ki3/zC+T9DO8sznmM8VhvwEE1HTHZv4/WOTvPPoVIsmBeEa9ry0LiY5LiDMaff97wDAuPLuddkAglKanWSBWAdpsoSZHi04yWq5NKxop8sgT2IqB3Ltwqy/zXL6nkWF12gJc01lNWNbPpnLPTU472H3J6csRkPKKuC6xtwNtY8VQodcFlT7QP9FCdrZ0h7w146XNf4crN2ww3NtEquLUlWoX/ZRkqy0nzDlmWhgdBoimqhuznv0Cnk/D+nQeMjw7RieeLf/mvMthcQyUBzFkbqstNY1mUNWVZUzcNTSuX8Yq6bHjrOz+gLiuaqg6ue6bGVcEwwjZNsIM29XK+CzGJ8N4uK+O2CWYkX/jZL4Z9pbGYumY2mWKqBaf7T+jkHbJEBTbJWkzVYMqS0clpqPjWNR/eucNguMH23oj5bM7DN94gz7thhhNBToEjyCqUpCgXdDoZabdP2gmyxFvP3kbnvWV5+U+qPrV7YatScj4kc01Tx+qwiPdaYD+l1hhv8FioDd6aP/Z1V/HHRZTvtXOwZAAOzoVEQbZyJRlsw5fSWCLjqDVaBsmSjAx2e13lslgnw9oQLvRF+bZ6f94XCCIaQ8R/+1Cgk0pHNjasCyXDEVtv8JFtBb80lTgvhgDehV7Q5Tw4hfDBkMfjg6w7vn9IsN1Swiq9WA7V9u3ri3BupFQIL8JcMM4VGV64j7iY+2hvap2L4Cf8B+NsBHvnYwmWGD+egqXzKm0vGhAT/8CshHPplwAr/J5EhmOzsWjUNGgVZWYi/G4r7WsdFkORz4VngncRPMZr6lsoGd7bORdYH4IiRQqNFCE3CtMEVJDZWReMP5RER9bGiyC/EkoGlt2DsQ2NCYxZloY5ZqHyrrE4ZDwebwOgWRRzev0uOEtdGzZ2dtnavYQE7rz9Fh9+cIfOZs7O7m2+9PO/wm/8619jNp2ipGKxKOl0uszmp3jtuHztOmmW8c7d9zg6WfCjN34HlWnQkrW1dT58cB/TGLa3NjGmwTQ1AkdTV2RphtaeLMs4OjhEylB8ElZydjri+RuvIJs5J80xZ+MpVd0wXOtT1zWmDiqEXr+LQPDBh/dpqtBWUNdhXp9SKhgcOcfBk0N+/V//S97+wXc4PXzCcNjj9PiEMy8oFwvWtzfDZ1QS2xge33uHNFUkSUJV1lRFiUg6fPaX/yav3H6Ozc1Nfvfr3+Znf+ZLXLm0y/jsEfc+uMPxyZjZfIZOUvrDPkJpjg8fUi7m7Fze4/qN2/Q7msnkiOnU0diK+WSBVNAf9HhyeoSpKxaLAilhXDdhqLxOeP6FF9CdPref/zT9ziZ33n0P7y29NKW7NsQZy97lHSb1DKTn/Xvvc3z0iL1L2yRSIFWQzy4WC3av7JGkGS+9+CKNaajrBpF4pE5i0fECeEaghFrmT22RzMc9wbYjMNr8W0icDcXOoir52c99lsMn+3zzO9/mZ77weTbW1jE4xtMZLzz/Eo8ePeLR3Sd01ZCHj+/wzjs/oHKOZ5+5Ta/bZT4tODs5xA1Suv09rl26yvt371KUNdPpBGMtx/MFnZ3rbGxcIc80aaKjeiO49W5sXqabD//cO/rHZmzEhU0maPLceXWoPZHeU1UlVVEwm0wYj+Y8/9k1pLS0G7aI+9iy0t++xcVveKgqy9HRo8ASRXpY+DjQsP35tvIpAk2eJhqtIUsS8o1LyIHBGUOzmLOoLK6q8G7O4dGYu+l7ZEhmJwdMzs4YrK2xc3mP7Z1dbr/0EiJRYA1lbKyfzWaY2jCaTCmqCWs7m2gtyTtdhr0+WZYxWyyYLAqaypMbwcnRhJPFAdYbcp0wPjylOpuSZDk3n3+JrNePiy5MNFcq2CM7PM5YirLg3gcfkKQJnU5Gv9MjSxPG8wmLujrXfreJrz/XJ8crFSpn1qGcpaqrmBh5Eh16pIwMLMfYeibO0p3PGWRdVJYFQLrUJ3tAkqcp3SzDbwiGw5ReGpzMrHFUjWEyNXTXrrAoNGVVU9ucxcKQa0naHbDVXcfKlIPDMU3ThKS2qTCLEbPJhKauybI8PNjjwEcpJEmacvn6FTb2LgctqvJMpyVZKkmVpVyMuPPmD/ju732T+dzz6b/0N7n68vPM6wnj42DZW9c1QiqqcsGHP3yN4/ff49rtF7l0+3nmR8cQq4sSRzUPdLggMIXIAD61krHHTKNTjUjiYE3n8LiweTcVKl6DlsUk9uAkWiN1SJhsY4LjWbhUQKSGpYjSjvDEF7gowQhJfFgxocSbaUEnVWz0h1y9soZ4+Rka4ygrw2Q84vDgkP3Hjzg+PmAxG0dTCE+idai2GYMta67dvImXKZ//2a+wd+1ppBCR4RJxgxQkqUTp4EymtCDRnl5foXUCIkMIRdM07Gx2uLz3DF5AU5bMjo6oqxJTG6q6oa7qYARiHe1AUuEcnSyls5ZjFiV0NNAJQO6P6N5jQtRW2dvKuA
x22VKKmJA5nLWsrQ2DtCMyzz4CVRert7N5gUoSpE5IOj3K2QRTN3jvKGYzvPckaY51niTLYpXMYBpH3TTUVRWWjnOoRFGMRpQjT1nWdLodEPDd3/89/vJXvgIq/bO32xbcEIGNd9jGIlX4tFYInFKIJEFkKc5rTFmhpMXg//QXX8Uylg96oprAB2mwjsASY0i0DrJbIsOACL1SMtyHWoeHsVLqvLcEljIP76HBYoy5cF0d3gbpU7u2RbuHx+ckeKRqpWjBWdG6yNaICEZgOQPLOxWMNZo6gvwgHQ77t4jsA3gRgFvbi9r2T8YTAjIMpg3Fmghszk8YiCDLaw9aEMGFjADIS4xre4nEsh/nwodkeb8KEUcUhFKEJzJOy8PxETQGECOFjIWLFn4Qe1MCWGlzAg+ION+scRalRJStKiwR+ERA5UUoSGCjjM+HfjjpPFK65YDkFkQpJQNosR4rzRKYee8xGKyzkfEjGuHEc+08Z2cj0jSj0+lijOX+/QeMJ2OuXL9JliZMJxNu3rwR1o4Lzw7ng+RUSsF0OiHRCuMtqpXJCbDAjWee5b/53/6vyfI+r732A/77//bvUxUjptM5jTFsbKyzvr7O2WjEeDSm3+tRLE74Z//0v8NLzdbGgDzr0dtcQ9aOTt1hPp+jtGY+n2OtRSkZjWlGWNuwvr0BWF556UWePNln//AQXxc8fPAW4+kh1gT5mZIwncyQMjwflFJMJzOsdeR5FgCfUKSpwhnDcDBkOByyvbvLW/c+oKkqPv3KK/zWbz0JrHtZIIHtrUtMpiO2t7eYTSZRGgmj0ZTNzQ2UUPT7fZ6+8QLr/Q22L1/lwd23efT4CZvbO8yLkgcP7mNtHeRUjUEITVM36FwwKxbUTvL5F19gc/MqmRSYH36f/ScHSCS9XpdOnlH5hvXBOicPx2E8gHUoGRw7dZ4zK2p2Bn0OHhzww6MfBvl83uHa3lUa77h0aY9iUaCk4R/+43/MztYOVVHw4qc/RaeTIYHD/SfM5nM2d7aZzOfsbO0gnKMxFqUUcjnsvi1IhHuiZTMFPhbLbCyCx2Lgso8MnPRLAK+TnPWrz/KVX/oVfnTvIaeTEfOiIO92kDrn9os/g+7sMp3XPD54xGw+Jc1ztBeMx2PyvMPlvUtsrO+CKyhKw6XLt3jv7j0WxYKiNuzt7TEen3JtfZsHjz9g6xO3uXHzBmkuQTjeufOYjY09ZpPRn3tP/5iMjQu9Ec7GgXfn1SzrXdjQY6UnzDlpAm2fpRjr0SLQyO0G0aJG59yScVhSsi5sW7YxIGuyjkbKJG7UFxoNYzndNA22aqjrgtophMqZRLvapgmoOVEJ/c3tUMWpSmZnpxTzEp9qVJLjgcMn+6RZl82tPWbTObPZhPl8RlGUmMqQD7r01td46uoeaZ6TZhrvBHUVpDCTecVkPKMsq+XmdnV3D+cdVV0jlUaJMDC0KsJQxax7nrAZE/oLUKHxMzxsHcZanjzZJ+90UJuKLE3ZXNtAz2dMF+U5U8P5ol5GPOeNNWgbKHQt1bLXSWuFUrGKZcO8k6PplJN5wdbmFusb6yiplzcB3uOa0G/lvcNlwe72nbfepj/cpbu2Qd7fZD31VJXBiJLpyZiDx0dgaorJGO9gsPMURWkpFguaumA+PqYpF3S6Oc57amPCw8zUCKG4/YmX2NjaxllPlinyYY/eIKcuau7dP6Q4ecC9t17jO3/wXa5/4nN8+b/+23S3BpTGUVvLvDDUTTBNqIsxb/zB7zA7Oub5n/sKYucyB4sGKgsuVDTr+ZT52Rxn7PKcSuIsHAFponDOcnN3m5effep8Y7CWqpxTVz1S55BS41XQamsdGvNboGPqJlyilj3BYZua2WzK9OyUyeiUyWTMYjanqsqlcUJIHtrGXolWmiRJ0EkSWJS8S2/QZ7g2pD9cY+eFp/jkp55ntih5/OgJ999/j/3HDyiqWXDdQ3L9qVs898mXufnsS3S7Oc7VSC8QWNIkJe/1SZIEJSWNtSzmFa6qqb3n9MljDp88Znx2htI5axubNPE8tiBMxERLyKDB16G8GhKxqFMP0jwPzpDmmqVMS4QqtYh6DiHP75l2tbcV9HY4pYjg59w1TcRqu6CT53z+Z75IGV0apQh0vE5SEIJFMaMoZri6oqkrqnJGVS6oolGIUpKtnS2sc0xGE+qqDK8R90TbGNJMYeoAILUKxhJ5msQejY8HPEIzsQz3hJA4IYNsUEpslK5a70O/ola49OPVrH6aw7nA0rXgmCidsj4OKXY+SKRUSB49kYkRnkSFNdoYh1YOrYMcTCRquTC1Dr1sWmuEFxSxjzCs7TAsWbTrM7IN4TnogsmN1wEUiDbphmU2Ls4rsIlWMbkHL0Uc5hpZjcgGmaYmT5PY9xrWJLTyuPgMcYAIUuX2/mlndLXPZykUNlIr8ajDc8HLC8ClPb/nzw2EiH18RDlZfBYRNtUAkIJ757ncN9z7Lg6ntq5FMbEy7du+lxDnEs7wM5ZgEa9dkAJ61cKn8FmcE/E4Y25BJHdsYEmFC8XGc0m9x1gbnp8urA+vYk9DoIiAIBd1zlE1TRiIrYLJTp53ODk5QydZGJbtGu69/y461eRpxtraOlXTkOsE7z3FdE6SZDhvmDWGw6MTqqqg1xtSFAuuXrtKqjXWeXSiSfQ23nu63S6DQZfF7IimqanrhpPTM6azGWUTxmAs5jPyTkbW63A6GiObS2wMhrz0qS9z5907LBZzkkpjTbDet4XDmGA+pFTGoixI5j06nS467bG1dYnLly/z3e//gMnsiKJc0O3m9AZ9Zos5eMnxyVkAqkLSmIbFYoG3Di8kzoWcb9Af4IVgY2OL6089w0m54Ojxfe6PR/Q6fXpZzld+9ld4/96HDLq7fPM7X6NuSoq6iiNJQg+lFIJet8fu7i6ffuVnGegutmw4uPce07NDXnvjDYaDPvcf3OPR/iHdbg+darzwLBZzMmsC2yM9v/Ob/4bBxiVefuVz/PKv/m0S8a9RWvD2W29hRSicHT3aDwoB68N5SgV5lrKYjfnea4/Y3HpCogS9fpf+cIOfe/EXqeqaDx7d48NHH/DJF15gdPwh7731OtXTz/Orf/mv0FQ13/nuH2KFYpinnIyOmFZTOv0tTs4mPHvzOmVZkqY69A3H/2tlr8CyJeHi/nH+DBK05hw+zihs/5NznmlRsXH5aX7x6jPcf/iABw8fcenKldAbqDTH8xk3rl7ljTfe5Ph4TKezxsnZCZs7a9iioDEFL3zxJe7e+4Byccj3vvtdNje3OB2N0Cowfrtra7z/zpts7j3Dlb/0Fb7zzd9mMOzy4OFDtrd2efT4CVeuXP5z7+kf0zygddsK1HJbeZLxpm9ZAougsY7KGZwD5S3WW6STse4c971Y/fDeYhsT59G4UMFRoQ/BOsNgZ52s3w2uDjbISJqqCfukEugsoSMlxdkUMRNB07gxREiJKSps07CYzcBa6mKKcx6lU3avXaVazPBNTbo2JMtTzHzO3vYGuYTTs7Mw/NBYut0u88WI6fEpp2eTkCjF9ZDohCxL6cUm1
GRqMElRvKQmEcl9EooywLOxjBxge4nocqFWWZE/i+babynKIsqubHqaIrBIXSNBoNlpaWkFLgug7D8T5JlrK6dgVVavb210jjmPP3PM7y0jxbt6+RJiNOnDqD6/lIQdVw271HpUN+8K1v8PFPf4a9wz3uPX837739Dh9+okNUb3D2vossvPYjdi9fIR/2eOuFVdqdOq5TUmYJjVpEPYxI0oy8QlKDMCBNC4JmiO8LlNJopWg12sx1F9FKYvQBzWadokjxfc+yd4RLnueEgY/jOOzt7kLZJU9zXNcjTwt8z6deq//Ca/ovTUX7+UcFbWELeiEqZxCcI05qhSZM/bMnlCBjKJTm0Y99gjefe4bV99+lnStqm+s45y9g/BBPOniOsKnN1U88mtQeFUHGVA5Ok0nz5N0JQZplOI6L7wc4no8jPcLIx2nYgnuyuIB1YlFKkecpeZZR5DlpGqOVDb7SpgoV0wohnalY3DZqVIWLXZyVUlVQqKzqIysUn7hLhWFO2bBwtkEzHg2Jx0OksAub69oATSkF0nFwPafy2z/KSKnOiH39O3QJ1YCcKelIQJZat578p5yuMJoyiekf7LG/s4suJL/3h3+Te+4/w9bBDgeHY1zXYX5uhv7OJgdbu+R5SRg2iSIPRxryeMyld97i+vXrHOwfWphXSgI/tPkJSco4zcjTFFdIZt2QetUIawSJIyiNnupeJgUolTOcrK5f2Gjbqbd1NcDokrh3iK7EbQ7CwhWVoUWplA2GLQpKrSz9DsHJlWO02xb29IKIMAqZaTY5HPZ5/oUXyYsM6ZRVra9ZvXmLg+1dgiDACUI63QVWjq2wcvw4y0tzdBohkefiO9Z1RBcpvV5MHOcV6mZTuc1kM3QFjmOpaK5bZfZ4Hut7h+zGGUY4aG34/Bc+TVir3ZFXI+kdHvJ3/p9/j6TMaXdb7O/tMx4NEMLF9fxK19JkfmEOpHWe+RkXrslme0ezMJlITs6fMSWHu7fZuHEFVZkcGG1dvxzXJYjqNFodBLqiqH6Q6qeUwg8kKMUEOzzKqwHHceh0OoxGI7I8sxRIxAdeY5rdAlO6nGD6Zm3hwKTxsOuNrmgNWXGEXMiJ8r56Tc/1cD2fOI7xpA9OSIVhMdGZTQ5nAvwwCTqr0pyNRrqGsyeWOXviD9jc/gzP/+iH3LrxOkrtI1BVOKyD77rTBu/nU8Wqaa4AMUHMDNaCfXLepIMqDUma4RnNsN+jyOx936g38PwQ1/WR3v9IS/t/Asf62m2EdEiSlCwruH3zNn/0h3+Tw1HJ7/3eXyfFY7Szw+q11+k0W2xvbRHWbMbRaNQnL3KEkNy8foVaLaIR+rg6pRVp4sEBs+06nu+xefMGw4M+x4+d5trV9/FcSavV5TOf+yLbW3ucPn8vftjgmWefp93ssrt9hb3dTVzXPkdRVMMLfHo7e7iunTorpciTHJODjFySOOFwv0d3tsNoUDDT6aCVJklTVFmysjyDNuD5TcrqnlGFotcbUaiSeq1OnuYYY6jVA/r9Po16nYXFOdLxkE6zafUisS1mXMdDa1O5iaU0my3Wbu1U1K+IVGSMhiNarSZKFawszSMwzM50p2thZ6ZFpgoODg8RUpIbjUbjSIcostTvXBaEfghAFIb2c2c5yigcz8UP7EAxDEM8X6DKlGwYM4zHNNod7jpzN+1Wi7Ub17l57TquGzDTneF7P/gmL7z8As1aRFnA4eEhC4vb7O3vszA/T5aVeJ7HXafPWTTWVBMOaelpE1qaI+264eDAxJDB+k4Dxtq+V6YNekJYqVhr02ZJTNZOixhP1uRWs02r1aac5IlJl0998bdYuvsc2zffZ3tnl/3DQ0ZpzNlzp+l0Ztja2uHzn/9rnL37btbWtnj6+99ica5NUWR4vsPFhx7ErS2ghgOyPMUPAowULCwvcv3996kFEaO9PrdurKJRvP/eG5w5cY7f/r3/Gf/4n/xjouaYk2eW2dncIKzVmFuYx/N8rt24yo9+8kPGo33+5T99j/c+/ChpOaLVbFAWmtdef4N65NPrDwHN8uIiQ2kY9Id05hbodhb59Gd/ixd++D2ef+V1Hrr4MI16nUuX32Bvax2EodPtcmN1FeE65HFaDX0dijKh5oesXb9NmZfkyhDVWpw7d47ttessLi/y2EefYmGxy9e+9S3O3vsAN27cQmhDmRccO36Mw94h43FKGAb83ld+l8D1mZ+bAw2uV+PC8VleePsG0glJDocE9ZBz589xuN+D1D6nM+0O/V6fMlcUUtm93fEQno3wqIWhje2ommFtoKiYGGAHWVtb25RFSavVpFQKkRes3rhJq9Nmdr6BlIpmo048jgnqHmk2pNfPuHzzFifPnuOlF57hc1/4MvVajcP+AKNL9ra3mF9cpNPt4vsBvcM9nn3+WWqNJidPnmZubo5Xnn+eYZKRp/tcv3GdlfklDnsDlo4v0NvbJWxEDJEUyr43GzyvGfaHlY7dYRwnNlpF2uDYOEnIspR+b4AqFVlmdalFUXLQO2Q0ThCOzcUJXI+iUOzu7TMaxXiOoBbVGcfpfyAT72eP/793vw/kg4jK573ivwspLe+bo+ZjqnuY/pm1Epaux8d/40v86ZX30J6kjAfMxDGiHlqI1v6Do9dBHOUBmKNwP+tIdNQ42UJHUhY5mIREjsGIqS2s47pV52+mfFM/CCpuZEgtqmGEbWR0WZBl1qJPG2VtEqVAlzaLJM8LsiylqLJMQDOORwgMge+jtE0PD8PQJoMLMHh0Zo5R+ReQxGP6gwFGC7yK+ud5tgBzfL9CKuSk5rHIzVRPw8+US2JybbANV5amUztfg6W3jfZ2KYYjtm7e4GB/n5Xjd/Nf/O/+t7Tna6zv7nDQG9FsNDFFxvq1a6hCoZSkVBBJQS10GR/u8earr3Lz5ippmoGwwsBaZK2b49EY3/eRxtCuN6nXarjKNqcaKCq+p8FUjmVHzarWugrMlEjXAzdAq5KJ3XU6tk1nWeaW4lXYa5PmGWVR2gwZz55HqR1CKZntzrM4N4tbQfHHlxZpN2psbW5w6+pVXMchDAMwAsdxaTY7LMwvsLC4yPzcHDMzLdq1iHroEXgOvmebb79yXsuzguFozDhJKEtjRZHVlNFxXVxH4rh2QfYdCz27UjBMM65s7JEpkFLzwAP3c+/956f3jqigkD/+B/9frt9cJWw26B0OEdKlMzNHmsaUhWJhaYHmTBeDPEI5qnsBI46SiW33aM/1NIjTBnhlcZ+16+8x2N+xtDzXnaZut+dsKrnrBUjHw5XWunZyXSbHkcnIB0t5c8cEVAhBq9kiSRJGw6FtbhxZTbcmQXoVCjeZjpqptr6icEyQyqPn3nGc6X0/CQBFiIpCFOI4ktF4jOf6XPzwQ3zlb/9NCmnd9vjpJlAcNTuyOn+2vrHjCiEMnudw99llzpz8G9xY+zWe+8kPeP+t5wicFK0M0nUxZqKx+Q8tqlP81tJSSmstXhRllUiPzVahsoAvNa4rcRxZWXPbcMJfHb/YUeYZBgh9z64hQvPue28TNeZ49oc/ssF7iwuszH2C9f
WbHB7uEUYBH/nok7x/fZWD3W1EmbC7t0smQBY5qJI8z/E9iR8ECCEZDKwY+nB/F1WWtOYWkBKEZyiNJh4eUmu2uf+B+9jcXGPQG1idw+EQP/CoN+oUwyFhKPEDnzqGeBQjjUMQRTYgt1B02s0pkrqxvkWtUacsRzSbEUJqXM+pgiR9/CDg8tpNdAmdmQ5BGLAfZ3i+b5+bKGQ4HOFIycmVRcLIIR7HOE6IyhI81074a7WAwWBIFNUxSOI4I89ztCmYm6kTBB7dmTr1eo0yVyRxzubWDoHv06rXQGs8zycrbFaTMNh9NdeEkWvRnNKgbUQPRVEgjKAW1snSkvE4JgxahEFEkmS4jkB7DouL88R5zuuvvERelNTCkHqjQRAE7Ozs0uuPQDgsLC3Ras2ysLDEN77+F5TlkJmZFg888DiPPPKEbSqMoswL/CCozAIsaiMQloYvbRbdpJ9xjDPhUaD0ZHRpD8NkialCwas1V0+GxEZMw8hNNbU5Cs0WPPT4E7Rm5/jz1WvkRcqHH3+YJE7wHJeZzhJK+Xz/h0/z8qsvkacjtre2OX7iNOOkZJTm3NpYozUYs3nrBkvLxxmnI8bDIRc/9wX623us3bxN967jHO72ePRjn+OZb3+b0ngkxsf1PU6fWWR+eZY33n2Ls7UWrtfk5PEzzHcXuXTlKic+/ASvPP9DLr/zFrkqOHbXKfzQw5OgihKlDQtzswxHMfsHfTZ395lbHjE3d5x3373E6ZPneO7FFzkYDa2t885tZmbajMcxt9ZuU6vVSQ57qNK6nbrCQynF/s4ep0+fZvnuY7z7/mVcz2dna4ML99zFV/7gb3D3Xffxd/7f/zeuvHcJlN1LitLWYpvbW0jp4Ps+pQI/rKPLAoHdv7d6Y04cW2S25rGbpWgjcApFmuX0en27pyEZpEMMgka9TlYUKK1AGvLCUtEmbqVKKxwhiCuNjuvZ+jOJU6IoQgRmWhsorSjynHg0BpGRZxmO49EfDThx4hjxuI/WmmS0xysv/xgvDIiTjD/54/+amaVlPvbog3zvW1/nxH2P8OTHPomnChaXl+guL3P29F24XsjC4gp+WOPK1XfZ2bnF4TBGiBG7O5usZymL87Mc9PqUWlPkJY1G3Qb55iWl1oxGCTggXUmZlxSmZFQoup0u47jHcHRo6WWOQxT5KG0xzHototlqkcSJHfQ6Lukkf893abc9siLjg9Zj/+Hjl7Z7/g8dQhxNVy3/VE0bmcnfTRyMpq9pXxilFWfvu5/zDz7Eey++QGk0nXiMEF1rw8cRCFMW1iN/kkgqhKx0FQaJM3VtU8oWuBOXrKKwPtgCged7CCRpNgaoFmFLN8OA73tgbOGpjWI4HFSp0BLHcZGug+fbwEXpSlzp4zcigiKq7KcVaZpQUpClCcNx3zY9RQ4IyjKnLHNa7cVqimPpVclozN7+AfE4JqqF1Ot1orBOEEa4foCrlNV9SFE5Q91BRpvk11TFnZh2P/bbyqxCLZQVfrpITByT93okozHD/pDHnvwsf/S/+M/ITczG7h7DYUIYeOxt3UaUCq0Fo1GCES4LS4vMdXwuv/oSr73yOru7eyAcXDcgCgNc14Y1jccpvuvRDptEjot2JKWUlK6d8E8DL6fagyMOszEGB430XXxjp6SO60zpQVmaUqYJuijJ48R+XZaVcYNr6U3CmiFQnZO7zpyjVa+TphlbO7sMhgOK0ZDxYMD2YR/tuizMLTA3v8jS0jEWFxdo1RtIx07hAkcSeS6h5+E7At9x8B0HKayYP8ty4sTS7ZSyEzopBF7g4bmWw+s50jZDrmed9pQiSTMu3dokVpayWKs3+PXf+PQU+TNa4zguP/j+j/jWX/4AN4rIipI4TtBGIR1Jt9uhPTODdH1KbRduCZjKrMdM7xj7YE5REI4akLLM2du4yc7aVcrC0pyMtlS/VmeWensWL6qTpCmlmlikWi0S+dEtWc0eYdp0mw903mWVI+G5HgibLu55Lr3DHi6ufQarF5s0EBXRDCMqtMlURgdV5s7kBwtsoxb4PkmSTH9mEPj4fkCWpuS5YW5mjt/9vT/gyV/7BKW02Vt3op53rn2TP5eVKHnSmE2QoEkT7kjN2ZNLnPrDP+L2Jz/LW6+/zPV3XkGroipwjhqbo+tx59eV5YewvytLixrmRW7NEgSUugRj/y7LMhxXUjgFjuMhpf5A9tCvjv/w8X/9r/73NBsNilJTal3ZIdsMliQtmOmcYhSn9ll1n2Dt9i1r3x41ePiBe/B9lxuXLyEcp6IwW5OHLLVZF81WkyzPSYYj9vf3adRrDIZDBsMx7WaLve1V5ueXePf15/nM0jKiSIn7ezimxJVOZXeac+vWGrVaROpBu9XAFZIiVxRJjnLtvdBsN9BGE9UiG3HgWzF/rVYjCl20KXEcTaNWJ/Ij+qMYKaHQJb7jEDoux1dWGI3HNJoNhsMRtUadQX/E6q0tFhaaeI6PVhAG1tVxpjvDzta2XbOikCJX7O7u0e/1mV2IuOvCCqPhkHSUs3K8SxLn7O30mem0KYvCCo2lHSDliQ24rdcjsiLDczxQglpQZ1QmONKxOXVZyeLcApTSDtIch8PemDAs7HOhC4w07B3sE2eljRaonDiH8Ygg8NnfP7ChpZ5LnMTMzqd0Oi3ybGjbDeUwPOzjCME4zTg8POTm9StcfOgRavW6LUwdGA/GSCTNZgPpWEo8QCnvNNi1k3lLMwMmGVpM+MGTwEQqja6lrE1hnTuanQkN+/SZs9xz4SJJMaI0mt21TX7zt36b3VHOyTP3cPmdNxgebnBz9Sa//7f+lwg3Qtbb9Ad9ilKxPxiQCUMRp1y49wLX3r/Ca6+9zae/8Nt899vfImhHPPuTZ3ng4Zx6vcFLrzzHTLuNcCUHvX0WF2f52K99nEJJGjMzBJ02USNicaHLoD/gwoWLvPjCD8jygsE4Z3b5OIPDMdKx8Rt7e/skaYrn+wgtcKXG8wSNVov23DKBF/Dc09/EcQz3338/WuWsra6z2F1mlMQ0ay3Ggy08T5AmsdXL5gUHB2Ok0yMZpsx0JcPBmEGywHuXNzh5/AyXr1xFas3WrZu06nUO0kPCMES4EmUMoeMQ5xpHumRlRlmW9PqHjJKSoN7h4vllvvPaJtIJMKWhfzCgyEsm0grPcWwTaybOpYAQBGFAluU2bNb3QDooo6bREWVRUKvVcISs5BMeWml6vT6u69BqNgl8j2FvzCge02y1WOgu4AiXbmeewM3p9Yfsb29Rb0qe+dFXiaTLzt5lblx5jrIs6Uuf5swsx+e67O/vc98jT3F85Ti3NzdwMaxvXCeJ9xmODux+41jqeK5KMmUYJilJGlPkBVFFJR8l9jOVJVAaXEfiuz55mlmtUJGD0GhTnQsMea7wPAfPc0mSlGGVERaFNkh2NBrbbL8kp6jsuJMqq+4XOf6jEJsPGABMv64ePlFNGhBH6I040tZMNDiT30+E3pZW5fDJ3/wd3n/nbUZJQpGM8LVhFPdR+RhljKV9YRuXIAirgELb/QohCDyfJImRjqV+5UWOkJKyLMgy6
1KklZ4mIButyKuJnVJ2IwHs31XUtqKwDUlZlFMuu51cC9wKUbFOYhUq4LpVRktAvVmn0apb2pQQGF1aS+zchjHVWm0m3mX25yny1Gp/kjQjSVJqUUKj2cELarhFieu4OEiEc0fDWJ1Vp3J9mugjTHUjYSDL7EOqDegkZufGdfoHByTjMcY4fOkrf8Sv/fonSfIxOwc9WxCWJfv7+0RRxDjLyXNFe3aWqObilmOe/fb3eOfNd21isesR1WqEUUSRZwxHI8o8Z2ZmhrnmDAGWOpYJg0Af2Y7KoyJ7smhPrQknFteOBNe1zWqeIlzrnJOM+qTxmKyyinYdl0bNhkOWhf0ZWivSJLGaGqXYXF8njQLSTCEcB0c4bB6M6XTmefieiywuLNBqt/E8iTATLZfGlZLIcwhdB8cReC74vhXMJ0ls7Q2VYhSn1hlIgMDBc53pQ+xVwsDAlQSeizGacZxQKs12f8zuOEMIu6B9+tNPMdOdsfzc0lLQdrZ3+Pt/97+jROAIQZLE1jbblSwtLdJoNdE4lPpoSz1qXiYFuz7aL6u/18YiSvHokNvX3icZ7FU6L/uM+kFIu7uEEzXQVUikMUypcUJYupW6o1SfPP9aqek9OKWqCWGpglpRYjnHBqtX6Ha79Hs9TKnxXHeKnliURE84Z5PVB8/z8QIfrazI1vp5TgS6VWinsVotrWyYZxjUefKJj/Hlr/wOncU5SlVOGyJRoSKiGpFKaV10jhCmKSF2Sn+bUmurT2+1FoaTy7McW/4Co098nP3tPWQUYJKkalYmYX6iQtWmhEB77xj7StpaS6JUgesKJrlXrheANASBY80ZJAhpKJXVC/7q+MWOdrNGs+FTalCFplAKxxU06iHSaVAqRSMMqwGdQ+fCObS2zaNWGikEnfsvEEQR9VqNrMip1+sIx1Jix8MhRVGijWY0GqPKkjCyiOHN9U2MEbz15nOMRgPefrmF0gZXHxKPD2nPtMmylP2DQxzPYWG+izClnRArw6A3wncC3FBy6tRpBuMx48GAdBwT1WogwRiJ77pEgY/jCnzPI83iynrZulGuLC1Ri+r4vkecpMzPzxEnKY1GHc+r4Tl9+r1DBv2Y5cU6YAuShaUFDg8PUaYgywu0LvA8F+m64Gh6hwMuv5OgjaHbnuNgbwDC0mRdP8CUdl+K04w0K+i024xGdi1N4gzXcamHLURpMLnGEx6ucUCGqNJgioIoCDjo79HrJ8x22/bcSkGaVegKkiTNcIRDtz6DBsbjMWVR2BBP30OXBfG4z9J8m3otYjQeooydur/03I949IlPcvm9dyiKMaurVyliBcqweGKJIAxp1hv0hyMa9TrKtYwKqe2eL7lj+TX2uZ5YwU8ycbQxlNoONZATaq1dUyYIkECipJls6iAln/+NL/Ppz38RKSVX33uLIKzz+F338syPn+HChQeJah/lvv6AtJC888YLnD9/nrXnnuf+C/eSFDnvvRfz5EcfJ09SerOH3L51g42NPe574F4uvfsmH/vo41y98hZ5qfFDn3PnThN4grfefp1bN9c4d895Xn3ldd5541nanUVCx+Hqu2+T5hnd7hJJphiPY7y1DbbyW5RphiMl43EyteVXqTWiOTjsk2vF5atXIKjjeAH7u1sY4fK3/tZ/yVf/1Z9Sq3XICkNeFBVy6OC5IaXKULrE930ODrcwZcZcZwZjMuaWz3DfI0/ywvMv8Myz32F7ZxvfD2i1W2zc3sAPqvdRFDiOJC0KoloL17G1yrWbqzjCsLuzw9n5Jh97+G5evLTB7sDWU6Ph2K69jrRFveeTlQVZnoHROK6P57oUqqiYLMJmWmltjTqqAa5RhizL7fDTMYzj1K791lnCmgYoRVYU1BstikxxcDDk2rVVhLCaVw00mncjvC6e57KxsUYQ1PDyDNeRbN2+xmD4CP2gRq3RQGN48eWXmZ/tkKsxc906165u0TvYR5cFvcEhTqXrWbu9htaaXn+A53gVk0IyjhOK0lrJCwPxOKkGiCFpmqJUwnioCMOAILTnIvAjlFaMRyOiIEQ6DnmacXBwWA2j7TX2XI/dnT2CICDNfnHd6H+UecDPy5WYiufMkVPZpImZ/q+aHtshvZlu6rqaRigDi6fP8tBHnuL5p7/LYDxixhgrXN+/abMh9HT2a6kvVeiWrC6+rEKCpHAwFaXDqagsSqvq1yqD1WAdn6ox8sRtypGy+hOL+khhmySlSxC20LI5BBpVZiiVWRpZhTRoU4UjyUljZw0NXM9Sy1zX5tSEvo8f+NUZtM3N8vETLJw4gUGQpxlpnpFkiqLs4wQ+XmQTzu2ls6nRFduoCqRybbaGsNOHSamktaEoclv89Q9Ye/cdsjjB2hk3+f2/+bdZObVIPx6wf9gni60WxmhFFIbWQz4KUSQkw0P0MOfK229w5coN8rwkCEIazSYIQTwakaUpLRyafg2v1Ub7LqO8BGMbwsl9VNW6U+bPnRbithiuwHxl9Tq6VFVWgCAZjRBFydzMLHmtwWjQo8hzG5ylrMh+wlu1zagNz+wNRpjSuoWhFR//+Kdoz3apByFCTII7NZSq0vY4eJ5H6Ln4zuRaWYpAHKeVHa9tmNMss4VRZYtur7UV6Ia+VyE0AldCnuUMRwmO41IYwbXtA4rqXJw7e5rHHn/EGltU3FKD4Y///j9ic2cHN4pIKr1UEIYsH1+xG8VUn2QHCpNncoLkTSlpFUlikkOlioyd9RvsbtwEbZ+pCQraanUJ6x2oErhBIoWDI12YUCPlkR35EbJRmT1M92IzRY0wE5SuSimX0mZGGWti0ZmZYdDvQWkNEOwmr4+aCGwzJKWDcBz7+o51lCszNaVvaKyroqMtsttqzPCRj3yUT3/+C6ycOYkxVuAo7M2HWzmLOcKp1iZLZdPV9dVaT533JgiNrs5XFEVMcarJ/VxR5maadTqNujVTUXbRHo3HjEdj0jjBGMUkB2Fy709+1doiNmVZ4PuhHQIoO5CJx0N0nlvErGq6pJDTe+ZXx//wYel9xjovCYPnWMcvgUICrrSS7TItGcdDfM+1OTfSGnUkacawP+RwcAspHRYW5vBXXFuQaM14NCIMQgajEeM4ZWt7hySJaXfaOL5HkSsGwyG9gx4//vHTrBw7xsJ8FyMv8OYbb1lTFyFo1iLQCikd+ocDojCgXqvRbc/ie541vilzOp02WmvGSYIRlpKZpzlaK5rNGmlpkb1SJ2ilOHfmBJtbPQ4OeqwsLSMQ7O3u05npUJQljXqL48fP8JMfPs1cd540LUiSETMzHYo0BZNx4b4lDg+GuK6PkAXd+QDhONTDkNlOF6UctrcHGJGxf7DOysoyozhDlZo4ja31rjGUQzN1VHJdj8APaTeajEdjmrUao3Fi6cVKU49qpCbloN+jrGjMg9EIz7e6nzjJkJ5nJ+Vpgee6xElCvV7Hdax+tdPuoJQiThIcY9jd3SEej8izlH7vkNdff5XxsMfe3iZB2KZQmnG/T2BcpBEokdFstUkaTTozc5YynCscz7O47CTvZqINrBgrE6mOqJoUPRn42AkrVX48
YAd/QlgNjtD2z0TFI3bcAMexFPV7Ln4UibWpdzyPoNFGS4/Pfv5Jvv7Vf8atq+/Salsb482tbYoiQ2YJ3/nGN2wu2/wC9bTAaMOLP/keWZ7xwx/H/Mavf5GtwzF7u5vs7G7z8MMXef6F53n7jbdYu32bcjzitRef46mPf5H1G9cw2hD6IcPxECcIEUnCqDfE81wajYZt0itbZyEEF+45z2g4ZBinbNxco1OfxfMjmq0uRRqzcuIMB/u77B3G/MZv/x5C5KxeucTzzz6PH4SEUYQqPeo167h54tgy506dIo5T3rr0Pl5YZ2dtk7UbV9jcvY0nJc1OizSzphiu69Go1xiNxjZcVTrMzs4TxzHj4YC93T3yIuXtS1eZ+cyneWhxiYfOzvKd13coiglt3jrkub6PweB7HqW0mYYScFwJ0qLp2miy3FLcwiiwUQGFnqLvWrsV9duhyAqkFtZIqYpPyJUiHic2KgGL8CllKfdGaw4O+4SzJ3nk8afYu32Da9dv0p316Q/7nDl7nN7hKo2GpD/s8dV/9c/47Oe/yO3Ndea7ddIsZ2n5BFFQZ3dnk2a7xfWrVymLAi/wUWWJ4zqU2rCz17ND7ErX7jqSNEmPGBOuHa57vstgmBISoEpFFIXU6zWyNCVzPYq8wBPWJVeUdg3zPIcoCsmzgsIYalGNKAp/4TX9f1Qq2geOCn6bFC9MylkBTrWBT9K+hRAYiS1ijebJz32Rt199CS1tiKDSmiwf2+m9oBIvV3bKxkw59HbSb4tQkFXTIxHCmTBv7JQEbCHmuEhXYJSuJraW/jRZjKSUNpSraqQc17HvW6ujKbJRVmTN0cRWCuuGJip7a60zhIA0wfIyJw2dgSiY4eTyeftawt7MzWbDIga+h5Ru1QhqpOfhui5laUXwE/cM4VQNgRHT5mxybjR20l2WJVprBgd7rL9/iSxJGI9jFpfP8Df/53+bRiPA+C7b23sk49hO35XCcSSO65GPYvZvb2B0gc5GbN68zqDfRxuYXVrGcSTj4ZB4HCOUpuX6HHcDIj8g9nx247hqYI4a4jtDRifWvtNmt3K7mmQIQOVEV5Yk8ZCyUES1iO7cLOlgxGg8Ji1KsjyfTvMnom23utbNRofFhWMcW15htttlNBrR7XRo1COb6G5sJghTKpwNjPN9H891kBgw2uq4jCbPVeXEpmwDomzT5k2+X1rdQ+T51ALb4IBt0kbjlCJXOK6HdByur20xzK0oP3RdfvNLn0dIF6UKDLY4+d53n+b73/8JbhCSlwVJmtJot5hfXLSNyKSpMOLnnN+j8FGgQgHsZx3299m4/j5ZMrQGANKiLH4QUm91cbwaOA7SscYEbuXNbzNqbJMvhcH1vSo74oNDj0nzNKGhiarRseuBXfzRBl1ae1mBtW9dWFxkb2eHNEtt6OmkKRJVkG0QHlHcBOhSVy5odqMBA0oQ+HWOHz/Nk08+xUeefJyZ+S7KWGh8Qi2doCaTRqUoLbyuKwre5L1PmiqDtnRO153+Z8/xz/KALdW0oocqQykNridptuo0GnXyLGfQHzAYDCwNbYLiTD6XVmA0qiwQMkIYjSoyyjwj7h+gs+yOn1sFJv+qsfmFD7tvSHRlRW5dBy1yL7DhmxKBCHy2d/bZ29nl1KmTjEaWLowA6Un8wKMWRaRJwrPPvMDc7CzdboednV2ajTrNToskL1haWqJUisFowPb2DlpLa/mq4WC/h67WkFGW2sBKpZifneXuc6c56B2QxGMaUUgURZSeohYFLC0sc+3GDcLIuhNK6TBOEoSwBiWFKnFdH6MEw/GImXYDYRzm5+YYjRPa7QaNepOdvT2boYZ1USoyw8HePuOhNY/JSkU6SsmyhFarAfh0O22MyVlenif06/T6OyBS5pdrzHXblIlgfW2IcDyUhpnuAmkB+4cDtNKM4zG1KOKwb4vfudkuQkgaNftsxHFSuVQKKBVBFOA3Q27cXKXVbCKEodGsURSSPM8ZjRMQDkWpMIWhXrdGC41GBEiGgxFOZcaTBBlRUMf3HMqyZHdvz7oPGkEgDePhPo4QbK5dJ9eCzuwxFucW6CzP4nohwpFcufo2h71dGkHI7NxJPvTIR2g0WjClnN7BPjhiy8Jk2GQMaopkHxF4bVaEQOoJ+8Vad09eUkz3+aNAB6p97LEnPkJeKA56h2RlgfQ7NLtnUaVkcNCj22zjScmpk+d46+3XyNKSJM9xpOJgbw/X98nShJ3NLf7ym9+mPT+PdAzvbd0iHY1YXFjkxs0b3OO7DPsHmLLghWe+j+dAPB5TloosLyjLnDAIKYqcNEnRpRXTCyS+75FmKctLS2wLwWA45t3XX+batav87ld+D60ypONy7MQpEJr7HrzIwomTXLj7BG+++CKeF1TyAYHj+GDgqSee4IEH7uHigx/i2R8/x/Ovvcrmm69w89J7SGFQhUabEm+cMDjs47kuaZIxGscEnofr2nv00YsXOXX8GO8Ne9x17oyNgzAOIgzx20t86uG7ePnyLtptkKYFaZ5Xw281HeRrYxDK0ugtTU1bt9lS4QUeUS2qtLdW+lCr2SahyKuw0kxPB1XaaHCMpaIe9hBGUJQaz5O02x3LSskzpDEMD/e4fe0a+/dcJPfmePyJu3j7te8Q1ZrMdGZYXbvEjSvv0KrNc/FDF3jrjVdotOZww7Ocu+tB/KBJf+dN6m6HZNinLAqCwCNOM4LAZ3FhnoPDPlI6BEGIRlEWJUFgHV37vQFZliOMoNmooXTJbLsNSMugMoJ4HFMUudW0ex7GGKJaHSFtDTuJOlFG0axZzWD+S9Cr/yOoaH81avPT32dRGodJY2A4emgrT6HpS05EwUrDzOIiD370SQ5LBaLyiheVm5o+mmyYagpiy94jVKgobcFVlrYwF460icq6WgyMdS+TrmfdQJRGq9K+K+lU7k5W5AagKJmYC2Cs0xZKVK4nHywgq9/cIXM6cleyiNXkTNh/W6rSOqgAwmj2NjcxQhAEHp7vW/guCPGjiNbsLLXGLGFgaRHaWPtfow2lURYxSMYVxC2nRaYwgNIM93e5/f4lRoMhSZxy3/2P8rt/+HsQGJTrsbW+SZFlVpcQp0T1GnEcs3b9Cmma057possxr7/2DukowfVdlk6dotZq0tvexZUu3VqdmnCRBlSrwahWIzVq2mxMWEhmem6OUK3ptNrYjBYB0wYnT9PqhociTnFcwfLKMWbbTQLPY2N7k1u3b3N42KuQBoHvR7TbMxxbOsbCwiLtZgtHCCSawHXoNixHHKMxpamKfay9uCvwPdcWCVVzZYtahVFH7jUWJXRwfAdfCjw/wKmQSSkg8BwCx7USEG1zZrKsckZzbDjcdm/A2v4AVVEUPvaxj7ByfIWimHB0Bdvbu/z9v/8nlNVzMx7HdGZnmJufo+K8Vajl5P67k2hmH7Jpvk91B+oiYXP1GofbtxHCvh+jDZ7v0ZhdxI0aICaZSyCEa3+tcmJtk68xRqEVVrc2+d5pE2KfD3ueK3tTQZVDNJlQartBa9vwBFGNKAwRjsB3l9nc3CTPM0vzrAp4Uyp
ykVcNuN0sVGmf8TCoMdtd4Ny589z3wP2cv/duuotzlVaoQBtLWUSIakCq7qDEVs5FeoJs2aHB1BilKogcR1bF7wdpuUdW9AalLPKndJVpMaGtYY6QH6VRZYkfWMvPotog71xPVGnXvgl6bN+jRYyNUjiY6boy4Rgq8avG5pc7jhryybUVQk6ZBbpqcE6dPsHi/Dwbm5vMdGdotxrV8GWWLMsYDsasr28gpKDZbLK9s0ev36fVbOAIB98P2NjaIR4nKFNy5foq8TghTSxFJ6pFFHlB4Ptk45juzAyHvR7tZpNBf8ip4ye5du0qnusigTIv6LSbVaij5qDXI/Q82q0OwrGorbXvV8TjGFV6SAT7B4c298tYjZsxfYajlMWFOeJRTBDU0ApGwwFBEDLs9Qg9lyzJmJubRemSZqNGnqcIBEpLdK5JnSHG5MwvtvADULltqIIA/MDFcVziNGZvZ5c4sY1bGIaWfue4eK5PXlhKUZJYl8Q8z2m3msRJQrPdIPAD8qJgZWkRR7qUxSGFyinLokLRIE1zylLhutYooVlrEIUBSgkO8z4LC/O0Wi1cx+PYsTP0Bxk7B7dwHZeZmS4znQ7GcVm9vsr+zhajOMbxXE6duwcZ+ETtNmB4663XuXLtTcLApem6DAYDzt33AJ4fUWRWOxGEwdGgojp0ZVBi92eBmtjTTyfBR9+voGKiCIRWlbbWIIzdC+4cN8tqfZ00Ot3uLForzt1zF8+98AOe/eHbGGV44/UDHn74w/R6I0ajjLN3neLgcJd42MOUOSsrx5i57x5ef/V1ttbXGccxc8vHOH3yHPu7e3zo/g/x3e9/n/cvX66segOGw7EVzGep/VzS4AJJYZERz3NxHAffdUizgmZnliTJuHV7nVoYIpDUI5f+4Q7f+cuvUeoSg8Pm+i021td44JHPEu8P+cY7/4abN26gTUmSpORlwfzSIqeOn+Ty6g22evsYr8b19S0++tTHeOXlFzg8GCCEJAx85pYXMConHo45PBzguR4CcF2rww2jiAcffACl7P3U6XSJooDDwx5eGCJbXe47u8HFM9dYiwNGo4xxkqCUplA26H3CLDHGrtHGSIo8r4bOlp2Tp1m1rpdobdHEIAjxPZ+iKCwjwhhwbONaq9UZjcY40rG0LFkyt7jC8olTPHThbr777W+iJdAb0Wr4DA7WWT59F09+7CM89fi93Ly9ytb2Kv3eAEf4jBhx7vQJNjfWUGR4geTHzzzP0sIst9fXaYUBGs2x48fIsjFXb9wkSROyxO5RViObV4wGRZqlVai9HfKFodX3SWklEuMkxnM8ylIhHEEcxwhpc7JcxyGqWbqZokQKSaNeIwojitJSr+90Nf0fOv4jXdEmReqdE2Gq3x9N3S3CIo54RmAns+bOqUR1CAu5Gg1aCz7y2S/w2pV3SBDUmwHNuFZxFo2lWigL3RWVnkRXCeS2Iy6n2TXGGLLcUrDEFLGxvHVRZZ4YbfnrYH/+dMCCmArYpZQoWX7gs8rK+c1O1CcT25+djv904TOhX1m6TBXEV703XaEGRVFUHMyS0ShBOj3SLKcsFH5oXdtc18X1rPe67/tIYShmZixqMWkCq3Td8WDAzfcvWWcuP+Izn/gSn/nixzCuZpjEbN66het6SM+hUAVB5HP75irjUUp3fp7zJ49xsLvO0//+L0nHKbVmncWV4+C5JOOEdDDCQ9DwAksZ8z16poTcWic6lXhfWg7f9F6xKJ3VWmijp02D1trqFO4oHCd1O1JY7/bNdZJdh9F4TFyUxGlOo96h251naXmFhbkFamFYLSyWbiQB37UCT6v/mNAS7bRdugLXk3iOg1M53ilTHDXSd2wjvu8jnWoyM8koKUtMdW1C3yWsFvM8t1ayo7GFal3PQxtDkqa8d2uLrLTd7bFjK/zap56y1s66nP64P/4H/4SNrR28wCfNUhaXFmi0axhl7wlVKoy2n9NxJJ4rp8YFQjq4foR0LXVL6ZLe3jabq1cos7jSjUm00jTaMzQ7C0jXw4gJXe0IajGTxl5bW25VDRG0MbjSted4alQw4RgaPCltfsMdz8SkuZ1QV2V1a2hVVlRAu5Z0Z7oc9nqAA0LaQUSpoCjBs25wx44d48y5uzh31zmOnzhGZ6ZtLY+NHUpo+0Nt1g4SVVotxU87lE2eXVENOew00L1jUTUV0GUpZVpb23dtjuhpH3y9STN/h1315FsqcWlRpcNrbcizDNc9WpaFEFV2kZ6uTZPmyOgj62g9yb+a3KO/ckX7JQ5TIRvOdFhmLcStBgIzHZuhBQSNiOMnjrN6c5VGo46QhniUkOfWaOPcXWesbfTaFq7jIYTLe5evWt1lGLG8vIwULm+++w5JnLF2a53haESjUSdJUuZnu9TCkKW5ebZ2dqnVouo+k1y7eo2Tx4+zf+ijdUm/N2BxcZFX37zMYBTT6bQQGNIiJctzGrXQiuuBLM1Ik8Q2Xa0mrucxHuXk2Yh210c6hp2NHTwnoNupVbRZh8GgR+QFtNptxvGY9Y0NTp44Djj0egMwivmFOZaPn2B7c9fmWgzrjHdz0nHKKB4yGpe02i2kVKA0zahGWZQ2AyvwyPPS6lNUYTNvXBe/GZLEQzCG/qCPdB18z6fXH9jBBIYSVa3jJYuLszZTpF5DzLgEBwOSzNBsdgg8z9pDF4qzZ04zGI4Yx2N832V98yqIiLm5GTzHpygF7bmT5FqQi106c4vMzM6ysbVJfxSTKEFx9W1eefk50jRFCEURBCSuR1ze4r/9b/5PtLpz3HvPgzSiOT700GOEoc+dS4NdJ46CxQx8AJGeoLVH36+ntLXJ8A905ZIppkOjajcDoNAKYRRr128SDw5oRQ6bxZBGs0Fr6Sxf+u0v8y/+0R8T+RG319dYXjrG9tp1HAeGgwFnzp5laWmB/Z1d9vf2OHbmAucvXOSaeBukoNGIODjsURQlw+EYKay7Za1mjWVcxyEpMtACrQxKGlqtpjVrSFOScQ/Xs7Xi9tYuRmtcP+TYyjEb3jy0hgD93gFKS6LAsNiN+NF33mKcjDHaDqWKMmU8HPPIhx9BqDFf/Ytv8bT8IZsbGywtLWGEQ6fdptFoUK83eeJzn+H1V37C4c42vd6AoizxXNdqroCH7r2PZrtNkcao0t6jEohCH9d1KIMWzeYcn374FP/ddy7h+A2cXCCla6MlJgi6MQS+Z/WUUqOx+89Ms4lWisNeH7CDlEanaSmR45iksPWG1nY9qUURxkCSZqR5geuA70tG45Q4ScmLhDfeep1xEtOaaXPs+BKtVkRZ7nDt/W2KdB9dSgqlSJKEL3zuK6SjPi+9+ip5kTHTafHWGy9x69JbCHzSM2dAxNRbAVvbKcM8p1738TzBcJhQKE2z2SZNM9I0PcqOxFBqjX9HzIIf2KZxHCf41eBTKesaHAQBRakYjWNaTTt48FyPw16O77nW2KNyU0zShOEw/oVX9F+6sfnpjXvyjFkKR0FRqEpwX9qgw0KhlKXYUOlfxPTh+ynEZ/KQa0Ot0WJ+bo61wx0cxyBdQ1TludgiTqA9y2e0E1
BbHHmeg1aWs6gMlKXGqYqUid5i4mQ0mcJLKSkKWxBMNAEIbGcp7lhMYNroTCa808R0MUGMjlzhADtl+WmUq6LpTYqVI+1DJbKWAum61uFo4oDmShzPrahC9lyW2qCLkqJUiDRFYNjb3rYaG89SqGw6vOb6O29hyhIhA778e3+Dhz5yP4XO2d7eZW9rz4Z8Vcm4g4NDtjb3mVlY4ux9x0Eo1q++zXPf/w55ktOZm6U7P49wrI6nUW/izJXookRhUBXX35USIV3AJwgsJVBWYZhOld3iuu5UmB0Gvr2+SqFKYyl3ZYEqCmveUBaosiTPMsoyp98fMXJcWu0uK50uc7PzdGY6NIIAjEEVBbooAVMFxhqLpkyn3pNMFVkVsRLXnViBm4rWd7RVyDucsCxNUICx4j+nQukcaS2fJ1k2qgoDzZVC6cqCWNn3JhBcWd+ml+TgeXiOx5e//AW8IKAsSrvQuw4/+MGP+f73f0xYC/CigPnlWe67/x72ttbZvr1OzZEYD+vmRCVMVQoXizSoPCOLR5TGNmOlVmxcv4RWeWXhbKeF3dkFws4cUnjVZ74DiTR3bKiTAUG1AYtqguhWVE37jz74WDuOtHK2iuZpv6VqJit0UVYIoyoKhv0hWmFD/sqCwKnTrLVZWJzj5MlTnD17ipWTx+nML1BvRri+Uw0prB01RmN0Pn3/ngTLRVYUZUH1gX7m2ZRS2hBcRzIx5bCalqMGWJVHOh8D03Xk562VpqLK2vvAGocopSkLey/rqpA2xq5rWZwgG7VqDbLvbYIkYiZZPBUll8phqRIfTwob++svPt36T/3Y3e+T5wqE1WShDa7r4bq+RemFmCaSW5TPjkjmFhZ5/9JVonqNLM3sIKRqnsOwRhCGvPTq61y8eD/33neeRrNJUZa88+5l1tY3GAyG9Hu2YL/3vgtobdjc3EK6Hjt7+ywtLtKo3BvbnSZGaxtmXK/hO0ts7m0jBTQbNXqDPnGSMhgMOXPmJAZDp90m9Fxa9Sa7e/s248Xz8H07HFFFgTTgeQFFUVJvhCwuB/T3E0bDkc2V8Dy8zgwH+/v2szkOx1aOMR4nGANeEJDGsQ3h1ZCmGarQjA4zS+PUlpbr+yW7ewfUaxFlmTPTbqMbdeI0o1mrMzIxzWabMPD4/7H3X9GWZVl6Hvattf2x15vwPn1WVpa3Xba7q4kWQBCGhEgIFCVx8EHki940hjSGnjSkoQdKJECCIghDNAA2m43uRluwutFVXTar0mdkmAzvrr/3+G3X0sNc+5wTkVnVWaT4hNpjRMSNc8/Ze5+991przn/+8/8XO4v0Bn0mkzEL7TZJM8ZYQxzH9A9FPamsSqpJSRREWAuhF2MrS7PZdP2YhpXlLjdub0v1V8NKa5nReESvP8DzRdY3iHwGwxH94TZLi12OnzxHWeaMenuM8owohKXFY+zs7jCeDLny7ptgDYFXkRUZdcN0bi2VLqhyESt6eO8Gpij58pf+V9IoPsXEajhnHiKrX5m3tcDNofUcrJ11wwwswb1XWxE3qWrQyDVeZ+Mhf/avvsm1d9/hIx/5OP/+//b/yH/2//6/UZiUzWPH2Nrd5VNf+BLXrlzj6pWrLC62ufT0U/QOdzlz5hzvXn4XZRUrq2ssLCzTbCRsnDiLtfDd7/wx7XaXTmeRIEzIJjlFIf0XcRgQjiW4b7elZ0oqE4aV5VWpfvR3aLWa9I6GKOuRTSCM2zSbDdrtDmEYcOO995hMJqR5SVVZ/vD3/znfayds7zzkxInjPHzwiF/8ha/wyqtvoAOf3/3DP8RDsbl5msO9A+7fucWDu3eIooiV5UVOnz7FvYc73L71gKVOm+H+Ds1mzGCQAUp6s7XmUx//JLYoyYt8ClblZQGIWpnRHrpzjIsnO2x24cqjAWkmSZHMulrsPLDkLs7TRhQ5k0ZCaUqyLKfVauL5HuPxxFGzCkmCAN/3KSYpQVALZFmy4ZhWq8Fit0EQ+Bwc9Fjotrh05iR/9qd/iij/ljSSJkppytKwutzl8rUrPHXxBZIwYmF1kzMXX2T37nVOnTqL8SJOnjpFu93i5tXL3Ll1m1HewytLxpMFjp98ilsP77O3f4fFhUUOD4ZYY+n1ekRhhFYS81Q1o8GpM1pEoKOqSlaWFsUfK8/Bg7IydNptmknM4VEPBUShz+7uHmVlxaojzWk2ErRWlEWOxeD5P40h9vj2M/fYWFsHMnLDqqpypTQjLsHWTBObsqxEz7qqMLbCVLIgeA7RxtdTdFQCe+Y4qJoTG6c5Gg/ZdipoyqGUWisn91q5+ESa6CWxqPB8CVY9K43I2tMzLwzEKAukGVcqHwFhrl1TYkVZOgPCGmFWtWiBbDU9qP65ptnUPHqZiBxNySU4tc/OfMKjlecqBkw/ayqp2JSlQWkDStS0fF9oUZ7ztJHFtv7ezqzSip+FKaWBsMoLKhQP79xi99EjFhbW+Iv/7l9n4/Q6kzLl3p07jPojFrqL0sCNYev+A4KoxXMf/wRQYoucwf59vvfNf0lVVqwfP8ni+poII1gonEy29X28ICT0RA0uSZokjSaNpEUStwnDCOX5Ui1x0sjCGxbEvpGExKFH5dSiMFAYQ+maSmt0SuRBxzy8fw9TlWxuHicIRIDBQ+Fpl9BU8jllmSZOnpbEBheUGvcsa0/h+RpPSzWuLAoq9546qBeRBo1RmjBJ8AIPk6V42iPwtQxwlCig+ZrA+bBUpUPkq1Lc6oOAwoqa0sFwxIPeEB0EoDw+8cmPcvGp85SlmfZx7e8f8nf/9t/HD3xWj69SViWb68uY8RAzHLC+ssB4Mhb+qTtXa5GyeJFjrUF7Ps12TFVJhSVuRFx65lmuXXmHqiyIG02WVjcJohbGURdr4F+hRBlOnloZpzWDtH6WqXGLmjPuqHHMpEk9z0OYhDVQIH/5vo/viWdGnmbkhdznhU6HEydPcO7CeZ555hynz59l/dgajVaCxVAWOWlekRUiYV5V5dw5M02shJqiXfWzoLJyno6vMa0w6xp1csmatdILNe2NqSmdDggxRnr/pgmyMxW0jl5Wj/nKUc2kT8Z9cSV0R62D2ZxiLfu7h1z/3vd46fOfRDfbUhXTs76fqu7tU8bJic6CpCkA42AS+wG9Pj/fPnj7R//tbxCGrtm/vs9Yl9zWin/Se6Y0eDqgzEXUI01FnrimXni+TxSHdBcWaLXaGAVvvPU2N27eIYiEZlVkEkilkwwvCGg1W4BmeamLpzTa80gaMePhiIODI7TW3Lx9jygMUBYebW0TeKKQ9tTTT6O8YCqAEYQB+wdHtFsNlFIk3QXx4SpKRsMB/f6AlRURG9CerFtb24d06WCNz/7uNsNBhh/EbD/cwtiK5ZUVkkaD4WDAudNnnEJRyiidEAQhaXZEFMVTddA8ywh96bcLwohWO2aU7uD5PqWpHFinacQJzWaL5ZUVRpMxDx5u8aA/xFa3CHyfhYUO6dgyHk+IkhCTV9O+zla7hXL9spM0pzfskww0m2srzhjaQ1lLO47o93s0my1K37C9u0+SxIwnE8qqpPeoT6vVwljL/
t4Rw/QyaTbB09L0XFWa0XibqjLEkedQ/Yq8Mo6+JLTrOi4qq5KF1iKMx0xGIx48us+J0xdpNFo1njlXcbGPARhSrWH2cx1yKFDWTin38pIinaS89trrPPfcs3S7HXb39vAA39fcvHmTTiPg9vUrPLx7jV/9i3+R2/cf8tVf/Mtsbd9nYWWVbChB6rmzZ1lc6FIBKysnaMRd0olhobMmTesu7hoe9fm1f/APZoJLJsEC6bginRTEzkupzCEIxZtIhQ4otgrPt/R7Rwz7MgcepROwPgbL8srqNNbKC0teZGweO4llJsGvrAg/rays4GtFErXYPhhx4bkXKdMJe9uPGOQVrc4SWSZeegJEGe7efcDNW7fpdLr84je+yvb9lKoQGr94BUov2+bmJmtra2SFUKxcxCgS40bmBKsgb6zQWVjiix89weU7l7GIAEGvPxTwUtWpq6XIc+I4nvZHp5nQJL2GVGIajYTJOKUsCgrX/lCvTZWpyIY5URyitSbPSg4PB7TaDcI4pHewz3e+9W3CMMALfOJGQhD6DEdDskIo0E9deorNjVXCKKbVWmRv/4BWs0OeFXhBQpYf8Pa773J8dZGDfp9TF5/DZBMePdjBKth+eJOqHKPKhE6rRRGV9Poj0vFYgH6gGccoJfNPfzAhzYYic5/Wxpol/cGIKBb7jcOypJls4nsei2sLHBweiKF2nhIE0o/s+4FUmbJ8qs76Ybf/CVQ0uVhlWc4a1e3sJiplUcoAEpzViHc9+VtHv8jzHM+XpMJpKc3eqxTK00REnD92lrt330Z6XETtR9cUGVWro9nZPGFq35D5AMc6Go00S0tgYwkioZiEoY8fKMpST3nwlbFOSF7mIK29qcNyURZTF+Ha38NUjj4152mhnb+OcgFTvW9AaCxYR2MRpqxSiqIsmWQpusjRqZ4urr7v4SlFFDUxRgIj4/l4vgRuvqOoVBVgZDD7nhgRnjl+hqWgwcuf/SzJQkx/eMi923cwRrHQXaLMcw7297FoTp5/Ch346KJkY3mRfLjDN//p7xN4IRvHj9FeXCbPUopc/IGiKKHVXqR9coFG0qDV6hI680OFq+SVFbaqXLm8nrstyho8raTEaw3FuHBou6UsSjE9UwrPCkXw5ntXyCYZSaPB+vKaK9FW094SW1WYQoJLrJXnSmu3mAiKX1UWo+U+KU+qYL4foH0N2hPqVhAQRYnwaT1Rm9IKfM8jbMQ0ux2CKKT3aIf8YJeQAmUqfC3+NIFTsamqiiwr5HmpOdVa4Yc+ynjcuv0Q68coC4sLC/zyN77iEH2hTymt+W/+v/+YBw8fcfLcSXGkV4qjvT2C5TWWN09wuP9IOKraw3PPuHipSIJVFhVZnjFIJ4RhjNKadDwmaiRsHD/NaDSis7iC1sEHVB1cYoJySKEMqilVyw0OoUPJ9fE8PZ2Up3RT912iKHAU0RgviNBWYwuLp0LWVpa4ePEMl566yJnzp1jbXKbZiUUJ0X0n46ofxojErqcVgRMesKac0YgAq+pqqowpa4TYWDPKdF1lc9LVxtb7r4ONxxHVumoix5d+vMolrGUN7lgzBWjCab+RlaQZ6dOYVohc8lTPFcpYTp85w83v/JA73/8hZz/1SWg0wApPuzKzfivrzqesyikgMduv3LXqZ1gE/nXfqhLyulehni8QYMiYWRVfKdfR4JJazxP6Jo5LP84yGeMKtnb2iOOI8XgsQhXG4msP6yTHte8RJwm+F2Ary2Q4pspy7j54IK/7Hs04IQ5DyqoiDsRXzdNOPSgKuHn7Dp2FLtq7y4njx7lx8wZFVdEOfFqdDvt7+zx8tIMxJasrS4zGY9ZWVoiiUAI/ZIwsLCxy1OthlWZ9fROlDhiMJzTbXZSjn3a7XTY3Nzk6OKDRSBgMRzQ7bbCaJG4wSTP2D3rkmcj8Lyx00UHEeDKmKCe0mm3x2Wk3iaOISZrhBxFJ0uCty1coyoIwErPs0WhE5ZXEqyv0hwOUUkQh7B3uoywMxiPGgxHNOGFpaVG8VbCEoWY8ykiSmCKrqKqMY+urLK9t8mD7gK29A44GE6JWi2F6hF/6oDXjyRjP9ymKkvHBBC8QlkRelo4aX0rMYY2g58a6hukKpQqSOMJzRoUWy6OtbcQE3Ofauz9ic3OD51/4pMyZDtRQar4PUjnqYy0CIHTw0WhMs9lgPJqgtSYIwymAa01FNuzz8N51ynzAxrETrCyvcvfuLe7duUU7aeAvd/nyV75C7+MfY3/vgIPeEaNBn8HRhAcPLjMaDoXyPh/HuYxq6oGnZkkbOGkSB3jVVgwS94CyFYYKZRWq0vLsUBssG4nT6nnPgLUSxBsDtnJgtZ2pZVKzgVzUILmCYZTnzpzZp8gK0rxHoD2WFjdQGqqiIGm2uHjxGRQSS2RZRpbnhJFPOhjz3vX7HBzkZLmA4MIIqXj+xY9ijFQVjOvpnYJb9YpgISOi0TrO0yfus9r1Och9dGBZXu+AsYycOFCn02QcSFyRF2JDgAXP88myAmtSam9H7fsEDkS3xuBrH1OaaSXG9z3iVoIpC0ajlCASYaQ8z4kaHTwnWJQWOXmWorRmYrqcPnuWB3fv42vNaw9/xOmzT/Ox5y5iy4zbt25wtL/PwuI6T3/ko2xsnOBb3/pTURr1NEVVsNBK0F7C4dEhjSShCArCQGThrZsLV1clmdzbOyBNc+I4YpJOaLUaaCQ+aTUbjCYTfE/TiCNGo5FI4I+kOtwbDAGIo0hiOqWEdeQJ26H8GQRxfuaKTel6WOrsSWuFtpajgz1uXXuP965c5eHDhxwc9iX71Jpmt8X68ROcu3CJYydPEoQhVQVFXoo3jO9PlZEeo3OhWVxY4cTKae7eeZvCN48ZVCm8aYlHXrdYWzeBzqhj9f9854PifjM9TlmUlGUhFSN0nQfNBjWyoHieVEc87c+BKcpR7ZT7/OPyunXTMLZutLZT6VitxUOl3pdWNe1FO1kFixh4CdUuyysp9+YlpQJUBlYSnygM6TRizm6eZH2hjaeBdEDvsEenu8pTf+kXuXz7FkdHB9y/fZsgaNBZbHG0t8+wP2b5+HGa3S6eNawvdOm2Ggz2t/ndf/bPsDpgaXWZKEjwrU935RgLy6ssdJdIko7QNYzFVMWMelNW2EroVLU4whRDdhOip8XfpXJ9QEwnELCqlgqWvqne4SHjScaJk6cIPQ+F9DZp58xsnJIGZqau5h5a8WhRFjyPKIkIogjl++goJGg0CBtt/EYDHYofiheGeIH02NipXLTQ2ermbKsVy6ePY49tMt7ZRvX2CT13Po4yl6ZiLlUn/goItEccRly+94heYR3qD7/0y1+i0+5QuLGlPY9Xvv9j/uD3/ojuyiIq8DHOyGt//4BGFOMHy7Q6C5RFAbaY0qNqg0FZXMXROM/FZyIKAuIooqoqmq02OpSy9bTSOJd86/qZnq6/BvCo+4mkF8RVvbSHHwR4nkhQaq0IQ6nc+X5MFMaEXoNm3GBheZFTJ05w4cJpzpw7wdrGCo12gvLEfNc4e3FjS8ycEIpUhuqUyhIYt+g4a5uiKKbV
lSAIJPAvZOFSWiM2SjJe5NEwlKaa0TqY0TymIiWYaWW6rpzUYxhmcuJKiax8raZlrATHoqToghEnXa7U7BpXLqFSxrC43OHFr3yZq//yj7j1/e9z+pMfx2+1YSpzPe8SJIl6rao0qy7OC7X8fPsw28raihO+0FO+eE3ErKXfPdc4Jgb0SiqwVhp/tdJzVbP6cxZPK+I4xnPNydI7Kb4enu9hjXFGnUOiOGR1ZZlGMxGqZGEofI+sqtCI4d36+ga93hFJI2IwGKD8gO3tPbZ3DlnbWOPCxfP0+33KsuSNt992AZSm226yf3hAoHx6vR5hELJ/sE+rGdNIGkRhQJwkHB322Td9kkaTcjDm6PCIoirRGs6eOc31927STCImacpgOKTd7lCWJXGzSRDFck5as7K8QlFWFJMhd+9vSc+B7xFHPovtJp1WmzCM2T044ubd+6RFRpKENJoJw2yI1ppmo8HRUY8g8mk2GoyHQ2In+74YdBiPxKW8qgxFluKBVKObiQuGRH49jCNG4wmnTj8N8T791HBw1CcKI6lyedK4XJYlnqeojEh951khtOqyoioqoigQawbreuqqCowiaUQoFHlppsacVVHSbnXYWF3j45/8FEnk04jDxxIbeWam9WvAkuc5d+7fZmVlg8uX38Cagpc/9lm+88q/otWIGAxTPvv5r5I0mhzsHnH93bd4+YVnebizizYFw8EB1pR0mk32dre5eu0yvX7f+deJCqYyAiaicAmNszdAwOSaHh6FEVGcECcJzTiSZ6XREmGXRF4PPI8wDEhiMTw+eeIkxiq0F2CNqLhZtzbIcSpMVVLkQp/PXbW9KEom6Zg8y5iMJ4zGY5GyHw8ZjcakkzHjSS4gYVZQVrmwgyr5PoH2sLaisCVgZO6tczQlCodBGBMnTbTW/OiVt9A64Nix02xsnKAoJOnJK8PZcxfELxGxHzFaUdkKW5Yyt1uJ93LlETVO0GldYWM5Zrhv0J4lDAOqqmJjY4nJOMPzxa9mOJwgULbcZ6wi8H1nuu2hNCSxKCr6gccky0iaLbSy4gXjgL0sy7DWEoY+k0mOwtJpN6jKkqPDHkESsnlskyj06Q8HdNUS9x7cImp2WO4s0tAeh4dHtNtdgiikf7jP9s4O7fYCr/z4bc4d3+C5Z5/h8pXL5MMJW0ciEtVZ6NAb9PB8GSNFmYnPn/LIsoLDo56MxdIQJ7ED3izjdEKn3ZKeZWfLMh6PSZKY/mCIqSqiOCaKQ5IoYjTJhIqWZXi+T5KImMLRsOfW3A+3/UyJTVEUImuKVD1sMeHdH7/Bd7/5r3jn8jX2+0OysqQ01Yzv6QwO4RW0p1leXeGlj3+cT3/2C3QXlygrCRrmJVNxi4QSiIxPf+IrHOzv8vq738V6tY+JeDYqrfGdIRYu4alrulrZ6QLFNPidIc/WyKCuZaIlMZJAQfoT1Az5dAFUvXkOyQ/9UNSPUGIeqqW3Qm6iSNeGYVjj3gRO3hcL2vPpLq0LdxZB81vtJosLorRTmkp4lw4Rt0b8a6IwodNt0223WVlaZrXbIO9t8/DqO9y9/h4PDwfce7jN/tGAp5/7GGefPkPQvcuN925x68ZtWq0WcWx4dP8BnaUV1s9cIAwDNhc6LHebeMYw7B/yh7/1m1Rxh41jqyyvbLK8sk53YYk4ilyzv+s9MJVU72qZvqLEVOIBoWfwvSAe82i+gqLM0cZRxqa0PTvXMC3UniiOOXPuvLxXzZYDU9VBn6vSKDVF7j1PE0QhQSSiCPgeKkyI2x0aCwtErQbKGUPWlaK6z8NYg4dIuNYKaLM+GvA9qZR5TZ/O8kV2rljs8GAa/KRpQVnMgl6txLyrGfk8PDjg3bvbDEvpA3vpI8/w8U+8LLSlSuhGw+GY/+Lv/D2yfML6wnHysqyZZkyygr2dbdA+G+vLKGvZ39uRJM6prVkl11FbC1a5aookkUEccub0Sa7dfsgkGz8eBLuqEnbmKWSMno4LrUUS1/c1vhdQlgVFXpBWOUVm6HbXSMKERtgmjDyOHT/O6VMnOXfuFCdPnWBtc4lGOxY6qZ1RvQwFlLgq7xxlY26zNZqoQSMmZr4VNMxz10a8igQs0Cii0Ek561qkwlWSzeN6QtN+GGun90GSmeoxUYB5KeZ6Dqj9Y2pAZV4dsX5O7dx3mB2nmo4DhYdSljPPnsP3f5k3f/d3ufG9H3Hhc5+oMy3qfkCDcmalzO15rhr8vsrbz7eftnlxIu7nCL3X89S00ktdwKuroe7+1ui2qQy1X1RVVRjAD2RZVRY6agawWWspsoJud0Eqr/0jhuMRzSTBKMvu/i7NZkLbTxiNJvT7PVrNhqu0GnZ3d1leXiKMPMbjMZPJhNFEehm2t3ZY39hgobvA1evXp8Dc2uqCYwIUUGpGowxj9vB0KIipKWg02rSaLbKsYntnD88LCaOIxaUF8iKn2+1w4+ZN8SeJQo4GQ1rNNv1eDy/wAUOjkZA0muw+eMi432dpeZleb0Qcx+4aSWI/HE+4cGmJd6/d4P6jLSpbEcUhfiAIttaawPentQxfayaTlKTZpEgzkihmkqUEnk8YBrL2Z4aiyGh32yx0uywsdCmrit29fXZu3+LcxYvcvHmVR3sDbJUSeNBpdxmNh+RFRtyMGA4LtCcy20VRCohucOCNEyvKK/KswFYG8YzzmIxS/DCk2+7Q6/fxlKDTkR/Qn+S88tqb/OIvfoMo9p16s3KVP5yq2QyqeO/6Zb79nd/noy99jh9/7495+tJT3Lt9levv/ogkCtk8dgZdFQwOD8kmQ7oLC9x7uMX9Bw/48Y9+xGA4wlZu7XWlY2sEvMFaPG0I/IAkabC4sMjy6gpra6usrW+wtLTM0vIS3XaLdqdDHEUyJpSAR8YKdb/KpfekLCErSyaTCbaoKLKRXLtaAFXVMvVQVAqF9AxHkUejLbQpzw8JQ5H79X1F5ABuoY37bkyJmIz0n2RM0pThaMig3+fw6IDD/T0O9vfp9/oM+n32D48YjMRHrygEJBQ/ISWAqzG4WXe67ikdEschsfb41re+RbvZoNvp0mjFeEpx2BvjYUGJjHpZKTwg1R0Kf4PxRBTfqqpiMh7h+aEwi7Qcs3TxUKPVdNV949oexNQyjgIRMwJHiRNj6iLP8HyPtc019nb3AOWYTj55XoKCRhISRRGTcYqnPJI4Zn9vl0azIdfSN1x5+8eMJyUf++jHWGktYo5y7t9/SJ6WfPxTHyPPf8TRYZ8zF87zaPs2k8EeeTri8PAApQzKg929PZKkQVUZPD8gjmOGozGVKTGVZdAfCVDvBFjCMCBQwVRxWAcy34R+iIkTiqoijAPKQsDiJE4cC0rApcoYysrQ74/odAQgyib/Cxl0CpIKHvDem6/yL37jt7h8+T36aUpaVeSlmIDV5UbjFL9qPr0u4f79+9y7d59v/+mf8tVf/mU+/fkv4XtC75o217tNkFYxSPw3/9LfZGFxhdfe+RalLaauz6ayZKUr3bnEKHBqYTWab42Z9uZMm/dt7Uzuen484btLUCXvC7yQugTq+55IR86YdWAR5Rg
/oDYjVNqbGSq6gFtUW+RDpjSUhVsUbUW7o1wwLpkwFYQ6oN1qs7C0TLPdYWllhdW1dTbWV+m2GyShh3ZGkvlkxD/8f/0/efutK+D56CgijCMG/SGLq6sM0n22dxMOshFbD3dJogbDwYiDoxGLa5sYFRAqzULcIB/lPOxP8GzOt//4f6SK13n2xWdZWlgmjkOUB54LpqxBqF8V4Og51lSY3JlbWkfBQ00DOVUHze7ymbLCqpnue01RnFeVq4oSrHH9OI7C49DyaZXHmFkiE/rS2B94oCXQLfKCMpugQ5/l5WWSxRZhK0KHviQMxgpX2vfxnTFlvQDVkqG+Jzz7wPfwPUlwtK6TaMPq+TM8erNPWKRUTvTAGot1HhiB1sSBz3CS8aPr9zgYZRgU3cUF/vJf/gsuiZZKhac9fv2f/AbvvPMWWTbgndf28AKfZrtNs7WAtSWPtvZZO7bJaDjg+Wef4/qNmO2te3jOtNbUpXOrpn5Roa+xniJJEk6dOc0bl6U5M0kac1VOV+F0oIGnnXiF1nhotCfJUzoSjrK2Gl1ErK2t88yLH+HcWUlijp3YwI99VtYW8DSuculSSFsJZXIer5yrFD2Rz8xtc3XS+n4j383TrrHTVVWwVnqX3PtNHaFSm9ZalJE5qnQ+VvNVmcfnobneODVLXGbeNWquUjxdLh9LMFR95PnXlJ4a9gl9t8JTllMXT5P8tb/Gn/33v8n2jfdIzpyaavvXx68rS1PunZtvnkwGf759iE35KC1zeC3pTb1OuIpk3cfp4WT2q9Ip7HnTSrOuQTItRrPG9YaK9LuMQ68ZkljXn1hWlN2KwWDApEglOa8MO/s7WGtF8j/QHBwcEMYxnVZbpKM7TRa6i0wmGcqKv1pZGh7cf8jKyiIvvfg8P3rtdbI0YzSe0GrGIsebTdBWs729w+rqKguLS2hlmYxTtE5ZX+0QBCF5Ybl5+zatZpNLF88zHo9pt9tMJhNWV1fQ2mdvtIPWmslkwsbaqgQeuVCAmkmTfq9HFMWiUOoSPYuhMIa3r1xje/+QCotRUFWQ5YLkJ0FAFAnqfezYhtBUfI90PBazvjwnDALarRbTsWzcCmItq2trbG9tsbO3M+3f6Q36ZJkh1AWlrYiTBpvHNtjfP2Bnd4c8FfUpa6XqU5aGMAixWsCL0qH141Eq8rUWMUStBBo5cew4VWUZDiYEfsDxzQ2qyrJ/NCCKE/cMWdDSi1njEaqmZSmR/NjZ3WY8OuJP/+T3xCPHT4iDiF/6pb+Mpzz29g74g9/7fe4/eMDh4T5pOnagkENcnd+V0ookbrC0sMT65gYnjh3n9JnTrG+ss7KyQqfTJQwirKkYTib0B0MODg45PDzkypWrHBwdcXB4RL/fZzQeMRiNGKdi5lrkLrgsDJPhQMaKsQwHQzw/oKwqWTvszP9LkHynKIvEdJ5SYmDt+Wjt4wVirhyGIXEc02p1SJIWnfYCC9023W6H7sICC0uLtDsdOu0Ol546RdJoEsW+0PQQxa3JJGXQP6J3eMDB7jZbW4/Y2tlhZ3eH/b09xoMhaZZRmtk6V1fBizRjfzxhb3d/CjK+8ea7NJKIRhKztr3Psc0N1tdWWV1qcWsbbNTl5GqHXn+IwePgsM+oLFFoymKmclmUpailRiGVp7HGkucFaZrSiGMqx9RQWqONIZ2klKViOB7RaDZIJxlFVuBpj9J5sOVZydHhUM5fKWwFjSRmMhwRRRGHO3sYU+EFEa+/9gpLnVVOHrvE669+l6N+ge83+czHP8YPv/snXH/r++TpgNFwQGUrGo2QsirpDwZo5TMZZ/ieprJikFs6sG0yycSHy1qiOGIwGuEHvjCh8oIoCNkbHXF4NCAKArzQxw8DgkARxyGmsqRpRsMZ8GLkmbGmIvBFddcqRZoXH3pK/xl7bBT5qMcf/cZv8if/8o/Z6o2YlAV5VVKZWp52xqOcLrRuJNchrAX29/f4H/7pP+XGezf4t/7t/zXtZssNCjVDOpU0HStPTPQ+9+mvsr/3kEE6mCZMFkFX00lKluakaU6eG1QUogOPRhRJ2dShQHY64IT7Sf1QWzWrANSGm3WTtNJS2iymtn8yDCwUFZS5MztUAOU0UEPNEBqsmvK1ra1pKR7aRsRhQnNhkV/9N/8GG5vHWT95jIV2A19ZfA2illbT1GZiDUop4qjFX/+P/hOeeuVV3nz9De7fuUtvcIQBHt2/R++gyfFzl9jZ3sPzfYajMcniCs24yUqny7H1FRTSBzPOCzqdFlfevYlpnmR94xh5btna2pPKjBWUW7tqWRj4hIHvfB+k+T5KQof026nevsZOy/T1hbMuMjNGmqutEwnQgaDgVSVOu1YLZc3TWtDzolYrs66XRI4ZNyKRqi4LsiynHGdUTtVOK0UUhejKsn/nDtHuNp31dXSzxcL6Gn4QUpXGPXZzal9aEprA9/C0JfR9ApfggKMWunqPH4c0jp1geP0aOD6odk3HUZKgqpKyqPjx9bvsjV0S7/l87Stf4PjxY6RZhjElSmvevXKN/+6f/SZZNqSqSrLxGAWMDg+x6h6eL1z0Qa+HBNIVzz77DNrz2Hl0F0zpaErKUQGl3I3VDEdjTqws8d3vfA/f6c0XRSHNjYFPEEV4no+nFLYqyPOCPMuoUos1ilacsLq6xoXnznPhqfNcunSGjZMbLCy2CeMApXF9KkKzy7OUKAqnfTiKxysx81WGP685cF5CvZ4bxGDToksr1V9TUVVqJmYyrbwYhzxa95ooJ079a9QMwXtSL79ObJ5UPJwmO9RzlpomcMaax87XfVupKjm00DKrjtXHUUphKOmudnn+61/hld//TZ7eWHE9WnpWn7Ee1hTTROnnCc3/9O0Hr/xIfELcvdXarxmtNJsRF86fo6qcYpmnyIsca0oWO138MGYySSVAcusEyhmkKkUQeLTbbdJsgqlKPD/As8IyGE9GKAVhoPF8USvs9wesLa8QJSG9Xo88L1hYapFmGWk+ot8bcePmTdbXV9lY3yQIY4ajIWkuvPciy9BYTp04zoMHD2g3W6RZirEZzWYLZWW9m0xEfjVJEiyKqpjQ2x9zcFjghU0uXbiA52neu3GT4XBEu93iwvnzeJ7H7t4euweHWCs+XZN2ymoQcNDbp93tcuXKNUpXXQkcE8PzFM12kyCM2O/3Ka0hL3L8MKC0Bt9IdTxMInQEyrmtj0ZjilKoTnEYkmclo5Fw9+MwonSSsM+cewqrFK+/8SaVKWk0EqlaV7KPNLeM01z8aCKfd969QhSGYC2TSTqdFyLnazcpxnheQGFFGcuLY6nU2FpAxbrqsM/DrS1a3QWSVgvPKsZZTn8wpDAFo3GfXr9H7UgsodAMXAEcldZw/uwl4jBgZXkF34vY3dvnlVde49at2/QO9qVvArcfa6Aq0VrTbbfZ2Fjn/PkLXHzqIidOnGRldZVGowkW+r0ej7a3uffwPj/88avcu/eA3b0d9g8OGacTibvcfBoEPn4gvcfi1+XjxwHtWHy26u5Km+Y8undPjKGtIa8szUYTUbr0gNr3TZKboqiNs/
VUCRIrTA2lHaW4qsjSMelkyMHervP/EtCpjg/EugGU9vD8gCiOabQatDsLLC8usba2wvFj4lu3sbHO6XMXePHlj9FsJFLpKEqG/T4729tsbT3k3r27PHrwgK2tHY56PfGHcb4xvudNAbHxOGU4GLGzs8fb71xGoUmSiDCA7uolrCnQniZJAo56I7JcaHNhKMbSjjTghIVCqkq7ipL0nxor1bSyMqRpSrvTAq1lPStKxplIQJeVgbyYKpCZUpFW4ntlKkPvoI/pNCgq8YJKR2O09lhZWqPdaqLwMV5Fp73E1t5dDntHjA7v8+jhAw77R2SZUAGjKKDRiOgPBiLVbUoINVVVEMYh1iiSOGY0GAlV1/WNFXlBnuZiRl5a8qwkm5SUrvXCWEVZVIzTnIYNKJUIRU3GBatrCYuLCxweHTlxHTWzVkDU9T7s9jMlNkdbd/mtX/snvPbGdfpZRlYVGMR8x9cBdaNhbT5XO7JL8GCmSY1y6FVRWV794fc5Ggz5W//B/46FdlcCMYeWaU8kNk1l6B8cMhkOKCaKycAt5EoRRQlJFNJOpHyVZRlZmlPlJcaW5KkhTws8bZwcrZoqDUkH25NI56xHxsEq1G7e9TFlAZzR3uqGXl2j3c5MC2p6jMJWiiCIWVhaZeP4CU6fPsPm8eOsrq7SajUIPGlsnjWouaqXqShcU2ItViDnOH0b3aVFvvzLX+cr3/glsknK9vY29+/d5/bNW+RpiiEi0AmVVaxsbhAlDY6trpCEPlVZYBEz0+XlLrdu3mFrP2Vh1aHERqpiVWmm91WSEqGelUUhiYYR/5TTpzZYXmyj3WhWME105PLK1JjnOXlRTKlP2lco5QsF0BqoNJ4Td9CqFiKY69uZo3gYDKPJmLKfUhZieBpEIc1GYw7hdnQyIB9O2B3cormyIkpmrSaNdntaofCdKIPvOZlXDYG2aDsLIO0TAaUxFd2NdY7u3Yf9Ccqp2Xme+OI0Njf49vde4+qjAyaVUMPOnjjGL/3iF0QFzQlVFHnJf/V3/ht2dx9RVTl+EGCtU33zA4wxzjtBsfPoAe12i/v37vHsc8/x3PPPkWU5/cNHaJxHjwvID/YPeLS1zanTZ7hz8zaf+vRn+P6bV1hVTqHOF4Rl0htQZgYqqeysr21w4fkzXLp0kYvPnOPUqU06Sx3C0KNiri/KAqZklptIFaUwdYZf88ln28+S1Mxvbhg67jZi8AngVBKtkgQmz3NHK3sywZnriVOgdB1gKOpUdT6JmU9qpjRIVSvteU6sYkaBrCWX5yuPsy89uz5PXpDpGLcWY0qWN9cYlZre9jYqjKcAiVauCd2p9tTB0uz6zEQKfr79+VsQhJRlgVKy8BYFGOuhlGExCDh96hhVpXjw8BHj/mhq2tnrjekshBz1RrRbMXGsOer1xIQ3zYn8AKss27s7xEHAYDDEGsSsMPRoNCLarSZlnhIGigDLpTMnOTrqEQcBu7lUhcpShFqydMzy0gJhEBNFMb6n6bZaPHPpEkp75FlGUWQ04ganNjdJIjH6i6OYnb1djrIeprQkcUyn20EBw/6QRiOm0ehgq4Jd22NnZ5fDg0NOnjjBcDQSk2Lt8d7N2yx026RZjkFoM0kSM0lTdrZ38IKAR1s7HPb7qEoon4vdLkkcMx5PWFtb4/72Dge9I9cz4JzaLURhIj0ERUG32SQOE44OelgtY7Kylqws8ZRy/Y8CKjSSBslCk8P+Ef1Bn4XVNpNJKsmllh4ZTyu0J6pm2IDAVyRRg4ODQ8Q+XABN3/NJJ6l46/ihowrJVhSl6yN1Y84JCRksVaUIVQO/AcPBgLQsaC+02dndIp2MhWpu63lO4N3KWukj0JqyrCjyjCLPGQ5K3nj12zx6dJ9JOqauSilr0coQhgGrq6ucPnueZ599lnMXLnD82DGSOGI8GnP/4QOuXL3Oe3/whzy8d48HOztMxiNhrWgtFLNAGC1xu0HcSpj2HNY9ZNOqtoCKVgkTRTuakVaatCqmhpa2Mo76ZhzFrqLWcLNK4XsKW1Aj0bLnWfHdKTg6Dy4rvdWeD0E90ddzm4snauCxMhaMoZyM2RsN2Lp3m9fLHFNW0vxvwQ9CkkaD7sICxzY3OXnyJGfPXeDkyVNcfOoFPv6pz9FoNjFFSb/fY/vRfe7eu8vtW7d59PABO7u79IcDUTHzvVmwjWE8njA0AiB6ShMEK2jtcfrkgnjUjYf0+j1Goz7NZgxKoy0sLy8SRSHv3biD0hD4Pq1Wi8l4LKwXU5EXQvH2PE271WYy3KF0a2lRCA1NlyWFzh0QKT1LURQwHEyIkoBGnJDnOYsbJ9k8tsnw6ICDo312Dybs7G6x0I74vX9xl4Vmm1t3bot/XxBSWVF/GwyGlEVFURqKvGKj3cILRNF4XI6JwoRGlEgltTBizprneJ7HxMUornwnSmcKykpEWcajFG0VZZk7emTCaDBieWWJdJJS5EM8zyc3ObZOmP6Xqtj8g//873D17iMGuQSkoZOwS5rx1I8ky3PyvHAeJBWVMRR5Tl7IwMVKH4QEFhWVMtx49x3+0d/7e/wH/4f/iEYcSZCjtVDO0ozS6XtrrYnDGE+LUY+xlnScMRlNMFaamYMwIm52iBYEPS7yXFzry9KhLNb5YoliUq25Ube2K/U4+lknQvJIqSdoMvXwV9PGMhFYkIa1dnuBjWMnOH3mAqfOnGV9Y412u0UY6JmnSi0xa2dSz5IMycNdVZXwFmcpz1Spa2ouaOugqCKIAk6ePsmps6f5/Je/AEpRFIYsndAbjBmPJ5iyIktThsMR1liKomSh2+He7Ufcezik3d2YUqmUEh+WyqmETJXtXIN8TZ9CyeIwHKd0mtGssqEUZqqeUtHvj9h6tM1hf4gICilX2XCGpwq3EAmCFAUegZtQfAWePwvaPEcbK4sCDwg8f+rT0GiKAZwgJXV/hQSt2qnijQ8PKYuMY88+R1WULNdSoQ5dkfuj8JFzqoPVumG8spKs61paVyuWzp3n0e4eDd8TWpzvUYwL/vib3+fyo31K5dNoRMRRxF/9a/8GSbPBZJwhssyaP/itP+QH3/8BZTmhKHK0p2g0W1RVxcLSCl4YMhr2GfUGDMdj0vGI4XAo7trtDi+88AKvfL8nUuFYjg4OuXPnHqPBiFarTRwnPPXUU7z66hvs7w4wWY6uNHHSYmPtGOefP8tTz17k/FPnOH56k4VFSWIs8waUltJVZ1U9LpgOBeqH0lpLEPrkeUYYRpKw1r+dS2rq6/nnbfM0Uc+T3hnjJF91LeqAEUnNNCPPRenGGDMX97tkg5kXzHSce9Yhq45ONJfMzCc39funFV3UzHS4rvpZ9di+63GqlJlWh+tLJv8K0GJrQQCt8QNNZ32D/uEh3sqG25frL6yVlTyNntL85q4Vj///59tP3v7Wv/vXeLS9zbtX3uPR1h6FCShKkSeJogitxSKg0WhhKni0tYPSmiC0jMsBaWYoqjGLJLRaIqM6Gu1Slal4qFRQmJJm0kJrRZWIOpj2JJgeG5lXV1eX6C60sIjC1trKmguoNHEcgxWTP
gYZo9GINMuoyoKDgyMazQbLS0t4WlHmOUEYsra0Rn8wQHmatdUNHm094u7dB0hS3Wex02ZpuUuW5UxyQ38wYZKLvxYW8qLADwLW1tY4ODigPxjQajXwtEeei7eYVrDQaRKGIZOioNcfyP7dJNpoNhgOh7RbTYbjMQ93tiWpMQbPk8qW52msqagMZKaEZove0ZEYiyqcwbPQTFutJiCsiuWVZVTg83B3mygOaHYaWCx5IX5sRZEThTFxHJAfHmGUYXm5wyTNGQ0ntJvxVLmyKktZR3ztqkyeSIArmaMmo1SC1zCgLESZVfxZFFVesvXoHqury1hdcTQ4JM4ilheX6LY7jMdjofdUlq2tLTY3NiQ5SCfs7Ozy7tVr3Ll1g72dbfK8wFMWTwm1UfuajfUNnrp4kRdffJFLTz3N8vIylbE83Nri2rXr/NZv/w53bt9mZ393GlgmSUwYRTSbCUkrBlx+YJGEQNVCPRJHyLooaV4NQAv91dlZKO2o+7I2l7UojlOKk/27ZMbNY1P2g9svVtcle5wEmsQwtX2Gm7KM4GDT9WRejaxWTgMHIPuy9vpArGZ1sOl7EIr3aHDE1cN9Lr/xGlleOL+pgKjRYX19gzPnznD+3FkunT/Pc8+/xBe+/HXiKCRNM/Z2t7h7+yY3btzg5q1bbD3aYjQeoVUFTm1TZKULhoMU43qzPC9hda3NYpGiVcV4kpJORpjKcufuAwfmybXs9QdYUxEnMTHCBvFcwjcaDEWMx/NIJzkoARWUoz9PJimmErZHkng0m020Z7l/b5uiqFhbPc6D23fotJqkk5Tdg22KfIynIooiIh8NiZKQssyZZCOCUJNmE1Ftc3Hxsc11/EDR6/en8epoOMFY6WmNophe/wg/CIjikE6nxXiSUlZOgVSLMMVonLk+Ru2sGnLyvERrRRRG7O0d0DvqY6yldFYqosAsNL0Pu/1Mic27tx8wLg2+H5I0YqIoJE4SLJXrZRAPEqWnuOdUEjgMY6q4YDwaUlbFrBhipXH42uU3+c3f+HX++r/zN6TM5qgkxtYPqjymjUYL1RtIQKOsQ1sVnqN3FLlQkcZYlCcTUdxozlxPy5I8zaZmSBg1J9c1K3tZF5yAxVZiJGhcLwd2lt0Y1yvg++KAfvrMOS4+9RRnzp5meXmRJArxtMLXyvVqiEFp3TQtkq2uEgLOw6ZwDZe4BNBOqwl6ii4qlFGP9QM8RvFx1w5l8TQ0myHNZgRqAa08QE+VK6rKUhQ5q8dOcGGUMewPOeqPGY1GDIYjJsOMyqaM80IoPGUupkkOoRb2oDTH7u4dcnRwhK+kwVw7Pi0I/aE/nFCaOghVUJrZhDYtXdfKZqBMJRdZiQLKLGCz1PpYWmkxMPVkUYp8nzDyiRy64Hsaz/edZ5FUZMJAaExVNiE92KHVPYcyOUkUidoK4CuZ4rWr0NUNw8r53UhpVVAuUwklrL2ySH7xImzdQ1Oxt3/AGzcecGN/BH5AnARg4eWXn+OFjzxHkRdOxlfz8MEW//Dv/xrpZEhVCQiQphOSJKG7uELUaKJ8n6jZoN1dYP/RQ7YePWJ1dZUyL2jHHdonF7h7+wYPH9znvevXONjbwfcjjp84xemzp1leWefK5TtEaokvf/rjPPvsJZ566gLHTm/QXmgThrIIVU4tB2OoSjNHeXr/vPC+IH2umqW1R5blOM/Ux34/35T/5DZ7Xc0dwd13Jco0dZJd0xXyTMCAvCxm3lrvC/CVS8iUWzSVM7l05pzaZ762NF+1ef8XF5+iwPXembnFVJgntdrc49UUmbLk+LV4yuPXwXk8YVnd3GTnyiOai0IvVMqiPZ8g0IShRxjHWNeHVk3FUX6e1Pws29raAqdPH+dTn/wYg8GERzuHfPcHb3Hn9h2iKCBJYqpQejDeu3mf/cMjVlZXaYWK4STDU5o0M0xSI3MsIsFqKxiMxgRak0QheVFMTS7b7Zh2u0FZZrS9BmVe0uuP2d47FFpKICbG4uMFvd4++wcHeF6ApzzCwCcKfMJmwnAkTeNFnqKjiCzPuH7jJu3OIo+2thmMhoRBwObmBp/+5Ke4ev0KmorBoEeWTmi123S7C5QVZIVFqZR2u83i0gKlrdje2WUymQAS7HraqZEqsFQozxInEQf9PmEgAGeSNGg1E5krjaW7uMCNO/emxr5BKKFHnucCmJmSOI7otNv4vk8vLxinOXEcsrW7RxJHJHFM0khoOKWk4WBAVmSEjZid3X2i2KfT6rC6uoZSehoLKG1pt1vkVUlelAKEGosOQypkDfUDfyqOJOuPEeDNBZ5KCQWxqhx9yvPIs9yxPyzaUxzs70xlvE8e3+TEibNcPHuWk6cvOr83xdLiIg8fPOD6tWtcf+89dnd3qcpCYhlrCPyKbrvNmdNn+OjLH+P5Fz7C8WPHMNZw7/5D/uz7P+S1N17nxu3bDAZ9NNKnEMcJcatJTHOqHlVULmlwMsA4wCT0JWETcEbmJ+Ooy3oKktayu5o0zdG2lDjJ8zHWMhqNmGQZGkizDAVOMMr1FE9D4jrOc3OqcS/Mz5V1j5CbE5WVKo5y6618RLvQa776PUPSpj/V7JBa6VOJSFOgPPANJAnNGoCyBmMrdrfv8OjeDb71zdKp2wV0u12ObR7jzIVzPPvss1y6dJFffekTtFoNynTM/vY2129c4+Z77/Hee7fY3d1lnKZYZzVmrKEqjFDbrMSpcbJIq72MVpYo6TIcDjg4OKTf7+OHgXvWnHBAVWK1JQw8ojDA8/W0H6coKhE3UprAD2gkMSCiN0UuDJ8sz8jzAt/zmIxGhKFm7yDl4GiPdDKSqsokJY5iRpMxeZZilSHwPVc1gnargzHQbDYZ9Pvs7YvqoalEAVAr7ZJ8me98XxQMR05UoNFI6PdHzr5Cqn012ByEIYe9I6IgQCtFuyUAQK3+JsqUhjh0ZuHGuLj1w20/U2KTVpZGsykyik7+0BpDZdW0z0XoUwprpCKDVVPqiPICOu0uk0wUXWoEAGuxpuIH3/lTnnr2GT760kfnoySheiAlujCIwMn7yRNfk8bqhloZJMaCLQxFNmHQH00rRX4YEEYxSaNDrBV5ljkne0EdgjASzxgnKRzGEUEUEkbiaVJ7VWijaDaXWV3b5MTxTU6dOsXiYovI9/CUSEZGgUfg+U7ByGKqSiodlRXjKzOvy6SmSZUfBOhQ/DWYG8izgaqFjoV+LIhkfmDP7Xc+LFS2xj8sCoOnDF6gCIOQVjNEZLBd0qHEIcNW1jXlVaRpRpqm9PtjhsOUdDRk0B8yGKaiRJKnjCYpZVExGeUUTlDClE45zYp8tZQpa2qdU6iyck4yn0lQKuILBltYB9m7Z8JdK42S6kRpnJ9LhadzGMxQ7FrAAZjKbfqeJgwCQt/nzoMd1h88Im412Ty+4UzyNFHoEwQhge+Laol2NEYQjq+SRj/rFkcFlFqTrC9y7doVHt57yO3tIwaFQYfilWOtZaG7yF/8i7+E0srJp+cUecF/90/+OY8ePJRKZiXOxdZYGs0G2teEcYgXBlTGojxR7Ds8PODg4ICm7/P5
z36cf/Wvvs+7b7/LaNxjOByyuLzG5rFjLK+s8dwzz/PFL/0CncUlFpa7ElxYSTCUS2SqshYar5+eJwLleQpBfTvqF+dYA/Mf8DyRm/Z9b7qvaoryvT8If6zpXqnH9mmtayx0Y72ml1VVjRDWqVD91D8+Hh6ruoC7p5K4SJ/NDDmcGXbyPrrq/OWoq0bTV2ZIzAduM3qaBAJQUXPSpZF4mgGyuLrMzR+PScrC7b0GW+Q7GyPUEJTCQ2NsBcaizc8NOj/8ZlAUYBVRqFldbvGZjz3HtctvMRlB6HlUWuSge70BCi2IaS1OgyHwxDAxCJbR2qfV6jCZTAiMR+B5nDixiVKa/YN9tLbEcQBUtNoJ6USSl+3tHalwjMeEgZiADgcjPF/TbrdZXVpmOEoZDYdYIvIio6oKSlMSepqNzWXWlhcJgkAoqYMxa2urjO9MqIzh4YNHFEXBJz/2Md5863WsKUEZRqM+eVoQhgkLnTbNZpNWq832zi57+wcCGAUBpioZDvsc2zwmIipO6S2KGzQaCa1mgyIv2VhbZTyecHTUJwljllaWqExFf9jHKLFN8MOQLC9oxCFYiOOQ9bUVkkbC1tYOYRwyyXKWlhc50TxO4GvGozFBEBLFDY56R6SjMSdPHKM/HrG0sIgXeEwmEwaDkaPAW7KsACUmiFlekpdC74t8MYwOfI9G1CIvMiwV2SSjzEXi2fM8Go2GUNunfSgS2OVVia81kR+IOqsyWDdnd5sNLpx9mkvPvMjZ02ewVnF4cMi1925y7epV9nYeUbpqF1WJ51mWl1Z4/rnnePnjH+fi08/QabU53Dvg7Xfe4R//01/n8rvvSvUt0CSNBC/0WVpdlvnb1vIBjumglCQv1HOpU6B060nlKoTil+LEBxwgUrnqjbWWPJOqVBCGIq9dVeigolCKyXgsCakDmTzPd4yIuXhFWSc25Cw6bA0czzFlNHNA8axiPhcAyjxt7VSpztEwZiBOHUPWFZ7p5+t5ts6mNBgj8Zijk2klUuphEE1NjZUCW5XsPLzDvTvX+KPf/W0KY0iSFpvHj3Hh4iWef+4Znn/maT75mc+ThBGDXo87t2/x7tWrXLt+jXv37jMYDV0grxkOB6SZ+CGJgEDE8vIxVleOkWYpWT4hS8fs7e9TVYYkSTC2QGtZ50xV0kgi8cgZZ9MkTmlFmknjvaEkLwoOD3v4vibLC0pdcv299+gutlBKEthmu0GWprJuWMiyybSlwGskxHFMWVbSQzNOOTg4YP9gn5XVRbJJRhTK/KJ8SxyHFIV4+GVZzmA4JAojglDU3apKLAqqsqRSCqUtnXabnb3DqWdcEAQMh0OKUjzhsiyj2UhQShGGgauaa5qNJlev3v9QM/rPlNi0uh2SOCaOY7SnpbE5yxwfsKQqjJSNrOi8Vw6dqRu45V5oGnED3/MZjYaOduaaaauc3//t3+LipadoJQkW61DdyiGzVoz9PJ/KgnX79H0RGBBqijwUnh8Q+NqVBEVG0AtkcdJaEUUR+B5+4Ds6lBJDJITalDtztqr2HhhM8InZ2Njg/PlznDl9kuXFBWJf4SlpNItCCZRrMy9rDGVVUJk5Cdlpc7EM3NogUIIykRqdTgx2Vs2YIhOO42r1LOmp1eqUC3Bq9GImP+sG/Nx+6sqTnlLycMeys8nQoS5KgedD1PahHWCVc1FGvEFkgRd5bOtk+upnIc9yxuOc0TijzAvhpfZHTCYp49GY4WjEcDgmSzOKPKMqUiZZTpZnGCcdjtPArycya1yC5wJSOUepsBl3jaa0XBQgSaokahUeGmU1hRERh3JcUN3bI26lHB1OCKJAdNcdapAXknhUjg7p+T445aP6WhlTU/UMeZZxsNPjYK9PWVqSuEEQRLQaCcsry3zxi59gcXmBySSlcPtstpuM0zGGkrJMXWVEKlJHB4fyvR48pL2wQOjUhoypyHOZ9O7fvssf/vrv8Kfff4s8zSmLihMnTuL7Ac1Gh2/84jf44td+Ae0EAxRSOq/RtBn29Xhi8RPDczt7/+x55bEX62TA9yXg8P3GT63SPLm9vyG+TlTre2wdrVB6bTxlMUoqm5VnUEY7atdsX9MG/aoiN0bcp630AwVBIMaIjQZFKV4LtUzv/PeZLZhSLawqEZpQcw2zdXDwvss2V1GZ5jbTP1a8vFywbK2l0WqRVoY8z6aeXL7nE/ghcVQRNRoOxZH5QvZpqIoPz0f+132b78NTSpL8IPCo8hxshe9rTGVR2pfgSslC7TtzYaXEkNW4Pp0kloXf05q8rCjKnKvXbzIaTciLjE475MKFdRqNhDTPebTziDKTqmOaZqyurLCzvcugN3Axm0eR5YyKCWmeU1nLoD+g0YxIJyM21pfxPQUmJ88nZNmEOA4YjxVeGLC8tMTWzg4azdajLU6fOsYLz7/A2++8KfOmUfR7Ayo7IGk0MEDTtvA0LC50qIqSJJZj+crSaTXodlpkeUbga9bX1jDW4nkB4/GYZrNFu9Wg3W6KG3qzSa/XozAVKI0XBGjtEQewsrhIZSriRNRFj456NFsRrWaHzc1jjIYjHj54iB/4HNtYp6hSKu3T6TbptJqMRimjccownbB7sI/vi39N5PxEDJY4iNHKIwwkcPd9D8+XSks+yciynLws6HSaLC8vOBprSZ4XHB0dTX3qwkB8u4yR9T4MfOIwdH1NCRcuXeLlT3yGp5/7KM3WEuPxmHev3uDdd69w5/Yt0vHIhfiGyhrW11Z46aWX+OQnP8XFi0+hvYC79+7ze7/3h/zgB9/n3oN7WAuNZpM4TliKl6Snp+5XVgrtawLPn3op1UBw6YzMDY658cSaaGvRFGaee/VW9wJRWoo8p6EVYeiTZxkWqQSZ0gFv9fUIZhXw6bhylWcBf6rZ79zEVx97FpfYaQyiZjuZvSyDVV5zjJ0pEGXr/zt5J1vHnnWyIi4yyjohKKxLZBwap/2pNYUGCIRZE4cRUdNKDGIMh7tb/Mmt6/zhv/gtUJp2Z4GzZ8/x4ovP8/GPvsRXv/qL/Ft/5a+TphNu3rrFlSuXuXLlGvdu32Rre1cAEWA8njAaTUArSa4bHRa6yywvbzCeDOn1D8hS6TsS8M6SpjlKKdqtBFCUlaUopcoYRiG60mAtVVGQZwVgiaJYYm9jwZfe3m67TR4FZKOUdDwEKwarvuejlVRR+4MRWMVwMiHPBWBYaLfIwpDBcEwUSeycTjJCV+2c5DmdVpOFxQWGoyFgaXcajEYZvu85xVsjVepWxGhkhPUVJ+TZBIv0XoehMMIacURVWQbDIUtLSxz2+h96Tv+ZEhvP84gTmciLoiBNJ1Iay0vW1jc5d/4szWbLcXZzRqMRhwe7bD/aYjQYzrjqGlGLUIrBcMg8937r4X1+/MNX+OIXv+joWgaLNPxW1tBZXeaphaYMG+05epgrpVppFrSmwpQiMSh8XsiKHFWK5nZZVVSldfuYSXEqzyOIQlpxQuh7mBKMjthY2+Spz57n7JmTdNsxWhl8pQgDQf09z3PyhrXsMU6Hu6KsLKWdl7p9POAR194aXX08EHp/YDc
XnAHW9d9M0d4a3agTmrnPPTZ5TJMbJ6Fsn6jqTJOa909Wj29SPlfOONBamRR8D6cLLYmQognYqYpJXV2T728pK4dAu+QgK6ScWhQFZZG7ZrOMSZqTphlFmrlG2YJJmjOZ5M7sK6PKc9HbLwv3WunoPnV1SFATzxP0zfe1NEEaj2yUkY9Fk96fU9GrqpK8MFgrRo+eJ7KXlVV4OsDzDJ7vE4YJjVaTbjvmwrkmnc4CC4ttVlY6tNtNmk2p/oBlOBxPg19Pe7z9xmX++I++SVlmU0WtOumtygq0VEWPdnZZ3zxOlWaCvrk564dvvMmV6zfZH/QpjciT9nsDPvryS/zN/82/x9lLF8SotBJETNeUqFk689OKDB9qkyRp9oTVfOhaHKBOaj5sYvM4Hc0txHM00JnMsnUqPgZtpNqmKu3GhqJy51WPL6W0zA3WiPKdLxXLOG6Q5xmTLGM8GlOVFYvLSxTOGHU2/h5P/KwRqoEkNfO/+YBrpCx26l9RXzDrJOkd5W9a8bFEcYwJIvI8E1qPEy2xCI3VWD1dxAVlsIgE8U8asz/f3rfVwRIusXEeS6VrVtdOOATrZPuVBCYWhe00yDJJOuMoIAh8rIFud4GDgwOZ0wpZv6zy8HyfxYUEz4cHjx5y2Btw5/YWJzaP02x2UAr6/SFRFNPYaOJ5Gs/3SCKhnIW1x4Mp0cqi9RKJOy6IeIFWHmVlHVW8SbPZJgiEiVCVJdkk5+WPfhRjK27duMFomNIfjAnCgLwcUJQF3XaHS2fPECUxWZaLH5inqIqCwloS1zujUKRZShQFKGVYWV2m3+vTbDYp84LRcMCxzQ1u3r4rPhe+R6fdIgxDTFWxu7/PxsYqnXab7Z0djh1fY6G7wM7OIbfu3mZtZZVLl55iPBlx4/Yd2p2EwpRsP9pjqbVAt9MhnaT0+n0aSUJvMCAvKuIwpNGIptVMP/BRRhEiiHGr02R4lLHd3yMOPNqtREyCFZR5weFoJEi076FdlaaqKrSvCEJNFHgsdyNWVo7zzAuf4KWPf4oTJ89TFCWPHm3zwx9/m+tXrnB4sIcyFcaWqKqk1Wrz4kdf4he+9GUuXXwKz/e5cfMG//jX/ik//NGPeLS1RRTHJK0GiyvLU4VWg1DTPc9zEvx6toa6Phdhk0g1AissiCknxIE5Uv3QjjnhAnz33mkVBwuVnSY/pqxYWFzgcD8HLMUkExFYx5wxpgR8asl7URispzLr1tBZfFL3uNbVeFVXyueymGnVxarpELVzczjTyg8O+DSgPNB1P6UkbxZAO2aHS7LkJNRU7Akn+Yx1pDgHPpfW+TXWF1ortA5YWFwk8ANQAljfu3Odd999g3/4j/4BYRBy7PhxnnvhBT7+8if4zGe+yF/41b+EKVO++c1vMhiNeOvNN3l4/4HMIUoa448cbSwMI5K4Tbu9gCkNk3TIcNin3z8iTScC4mvp4SmrjMDTpFXFcDRGa2gkEVWJ9K9ZyLKUvBDK4drxJSwV6SjFlFKMaDYTsqLAc2ahWVZQlNJ3tr2zR1FVxE6YgqoiiSM8Da1Wm9JUNJJEJKBLODjsEceRsLFKiWvzNANr8X2PooQw9DG2ottp0GpEFEVFVWVTNVUxhw1Y6LaxxpBmI0pTsbO3N01SP8z2s1VsWg201mRZxmSSMRmPOX3mIr/09a/QbLcZTjJu37xLv3/klGMCVtePs3nsJOPRkBvXr3N0eCCThRYFhmajwXA8nD2wVHz/O9/mE5/+NJHvuWdYmgVFaWLC/a2HVFZNEWLhhormPNp3g8cNFa3Rno/2pXlZYZElwLhKSkVpBIEo0gpjNOGJUzz70ed5/rmnWV9dIvQUyhp8TxMFgUgcu6CtsgZTiuxqaaxrVhYUo3pfsDgb0PD+xOWDJFt/2nuUlqqZp3zHr5+lL3XwJWg1UCMXPFHRUbMJUs5uhuCo6YsfFKTNJqpZ+Plky7J9DG2pyoo8y6a/rXXu68P4WhLNKPCwDQ9rAoxNZt+3/gZy8tQqdTjcpy7P2jowtJJgVpapBHHdk2Gn6m5S+arNImtTxsdlhTWedsaq7htP0snUyC6ORJs+DMUHR0QQhF5pbR3M465uNZ2462C5yHL+i//8v2YyHoMyxHETXIXSGo3yPMI4YWlllaWVFY4Od2g0mywtLTOZjDnq9aiKkt6oh1Xi7FvkBZ1Wm+dfeBEv8KnK0lWZZmiYVe5p+J+Z0Dy5TZ/QOelj36mu1TSDD3rW3/eY1eN7+otZUj5bFJn+X2vpU9VGT6t51lrwpaJY5DmrKytYpchdBU4CAlmghqPhtLG5ygtXHX18QZ0xH6z7Ko6aaIxrO3BjQGlnkjqTj59eHzVTP5smW/U3cxTN+j6FgUeYNMjSCdrRNjzfIwgDoiqh2ZS+xvpaSzXJzPwmfr79uZuAM7N7Yt3zKRKlUhkO0IiJai3RLc/Q/mFP1hTtoaxhYWGZorIU1V06nQ6e0pR5gcFSVjkLLY/VlVj6IKwmCRusrawRRQ0RhPA8BgeH4gvieXQ6DTqdDvcePCLPUrqdBFOVtNodiqJgMOixuNBmNB5jK8P6+pr4qinpC3y4tUUUxeRFwaOtbZRSjCcp27v/gksXz/G1r3yN3d0DDo4GjEZDdnd2OXnqJDfv3qE/HHCs0+bh1jZJnHD/wUOqSipR1loacUKr26Lf77G0uAjKMhgc0R8M6Pd6rKyssra2SlWWNOKYZithMBphK0N/MMBWhij0SeKIsixZW10ljhKuXn2PZqPN2dOn6fV6XH/vBiDre1XBcJChlM/27h5J1EApzcrKEq1Wg6W0y3iccXB4RGRCNtZX0Z5HWZV0Gm2MtSRJzOHBEf3+CD/QJElIp9sGpUgaCZ1Wk6IsODwaTM20u0ttjMmpSkOn3eHTn/kMn/7MZ9k89TRB0mYyHHP58lXefO11bt++RTYRp3lTFQQenD97hi/8wlf4xKc+RXdhgbv37vEbv/XP+da3/4z7D+4RxTGtZpPF1SVA5mZRpxLDT+1sMOrkoywLSiMyu9Pmf6WmPcl+6OOFAToIUAqqvCQdDlHVbI6qgSZT5xDG1EiNoGXIvFqUBZ3lLr1BHypD6YJVAIV4KIlyqatmu4TEzs3fqsZcpvM3j8/v7jzmBZoeo9nPcKXZZi3OU8JtQs9X9SStHLhWH8OBEtZlM/U5SC+Ps9RwyZ4jwgtoZRwNrp6v62Nahad9Gs0WzWYXlNhm9I8O+B9///f4nd/8TQI/ZOPYMV548SMcP36Mb/zKr/Bv/dV/m/2th7z52uu8+uqrXL8l/VLWSlwxnozxPDEujeIGq0mbtfXjZFnKaDRgNBowHPSEbaBqAR1JMjytabUS2q0GCsv+4RG5sVTA/l6PxcUOR4MBWZZJb2glPS+h650JAx+LJQwCAQWynCQO3XtF5MDzJMZoNBLpBfcDwNBsNCmyHC8IRMHMxVv1FvoBlanwfCWCHrl1vniKNBU/nLKUREorRVZIT7enhS44Gk0+9Jz+sxl0YsjzivFoQprmfOmr3+Cjz13k9Vd+QLi0zsqxU+B5GK2YjCdoTxNECY
PhkDhp8NLHPsnd27e4deMa4BaMQFxM08mEupFs69Fdbt29y1Pnzkyf3xrxDYOAUPuUVUVRVRSVoagqVFUJuu96OapSONO1r4QgCa7R3PVQeH6A2I16rK1u8OInPsJLL7/A8RMbeJ5BG4PvQxLJQyZqMa4XoTLih6Xk5zTPxWBzLnGYBvxupNYiADALYtQT75nh53OBTv3KNAadBUlaaQzW0SXMLOCiHtsSeNWornU/P452yMGmfX0ORWaumqPm4Xjkd6ou9/6E50UIT3PfywoaJojNfO/EjC43HwDOaGY/IREE9+3qoMSh5+7oEtOKmlr9/jrplIZ/45A4JxbgK3Too73QLd7MAlAlymhBEMozGCzgeUGdLzIlINnHqxKzc8d9ZzuVAgdB4P7hf/1PuH7lHqHfQKuQpaUVzp89zTPPP80zzz9D3F1gtzfGorh37w6//o/+Hp6VKkKcJNIPFDdFMAJoeh7r6yc4eeokKoinEszWfpAE8U+5f098hw+zzdO26kqNVaLiVRYlQRi8r2o52/RcwuDu2PuAgfcfpz5HpZxzvEY8CCpDxYzCmBclW4920F6trOY8k4whzwuyvGCSTjjcO4Cq5Oy5sxROYnJWKZrv3Zrm1VTGOJEM6X/L05RJOqFwvXTYmSmd9n0817c1TewtaAulPMTTY2kNUdJmMnz42HdU2sMLQqc2N5esujH6s2j+/+u+TUYTqaQq7caKdmIQUFGPV43yZgIiWEk+rTP5M1ZYAtoPZBUzMB6NqKqK0A+IQ5/FbodWoqiMZTCQCvTezgFK+8J8KEsX3GSULtkqypJOt8Pbl6/he4p2wyMvJlBFZJMRvpY5tREn1Fz85bVVwjBkNM65fXeLIAhZWVri4aNtrLXEScJwMOTo8HUuX76OUpqsFOpKI4x5+OjRVAHs9Tff4sHDLZqtFgvdLmtrx+gf9VBaxs+Nm7f45MsfYWdvn3anw+bmJp4WBTPP15RFzmg4gKokK8WWvihLFEKjacY+RZrRXVkhLTLSSc6lixfZ3T3gwYMHlKUhKwqaSez6Xj16vSFFVhC4ecxUMu8dHByRJAmrK8ssLy/haZ9+/4hmq0lZVkRhjNKK5eUl0nFGkRQksSKKYrrtNvuHhzx6dEgYhix0XfV2LCpUzSTm+ImLfO7zX+L5lz9Ga3ENW8HB3gFXf/gd3nnrDXa3HrjeSLC2pN1u88lPfIEvfenLnDt/geF4zPd/8H3+6Jt/zM1btwCIk5iFlWUBWLF41uL5QoMWkRTpTxUpV82UZYGodtYgkQa8wKPZbtBaWqCzsEDYbNFaWSVuNsFYtm/d4soPXqUap2IHUD/K07/rakit8gjaWvIqp7vc5uEjTTWuyNKJA0wdnOjifWstyjNunZF1uQK0rdEhUUUTPFJNp3NpFldu/nR/lCRNmNl6qeaAODvNSpBrYsWTsD7/WZ+NW+GUwio7na+ZqwYZK8lLPesqY3HeG0JbM7ZWSMJd8Tkqn6O7TYFkSxxHRE4FtKoMo8ERf/LHf0Q6Sfmd3/5tPvWpT/O5z36GT37+F/j6N36Fg8MD3nz9DV5/7TWuXrvGYe+IqiyZpAVpKqJIURwRhCELC6t0O0uMu0PGoyG9/iG+70mflHEGszbHa8dkZUG706Qy0tNngckkwyIJvCkLGo0YUEzSVFgqno+qoNGIUCj8IGB1ZYl+74gKUWOrKkMURZSFqJUlcUJRFIzHKXHouwq2xErWKipr8HwPX0sS0+m2RDrbF1XEbFKgPFGFbYTSs7azu0dRlkRhLJYb3pyH24fYfrbEpqrIspw0K/jar/xFNhcSfus3f4PKCzgRt1h0vhkouH/nJgcHeywurTCZjHjmxZdoNlucv3iJMAq4+u7b8hC5fpciLzCuobgscq5cfocLZ886Pw5FVUnwoA3sPdqiMhK8iDuwD0oa5lQQzKhYupZr1fVYmf5lKkWj2eXpp57mYy+9yOlTxwl9hSlLPAoaUUwSBY9JBVdm1utT+0hMsnwqi1k/+NNkhSeSGskUZH/uNYt1/cJPorpzO0FNkwGlakTDDX6tCLSHUppKPy7JW1c2pO6lp/urqxvT/UxpPtKn8ngYO0M5ZmnKT9+mgbN6/BO6rij5nniQPPEZORtXWamPbaHuk6gnz2kiU79/bh+zoL2m2Qk9sTIiW11fH+3kB8MonN0jUxsrOlf7ICQMAycm4c8CfMsU05keW46IVRbtrvWf1xjvac3Vy9e4deUhf/2v/lUuPXuOsxdPsbaxRNKIpwFzVhp+/MZ7DIYp3YUFmt11DrfeE38AqwkDd4c9jziKOHfuHMdPnsGPmugwot3uUFMP6nOYfZcnq2z/07Yn6ZUz9BvqzLmqDMFPOpidJbXzl+1JIYH3nT/yHFfOB0j45YWoo83JPaM94jhib2cf4xKPKI7xfB/lqmxWKSoDQRRzbGOFRqtJZczcYKzPafaSxLkCeKCkb3A4Gskz5mmpBpRO+SkI8PwIrKFIU0qL6/uTW1PNXzOYBg1Js8XgwNFOlXHgiEahHdVmfiy76uDP4Av0r/v29//hf8fCQoeTp06xsrpGq9UhTSuKspjOU3LJZ3NwHWBOUSEcJdLXaGcnoD1ZE0pryfKSMMtorqwwHPdoRh5JFOP5UlnsD47odBZEWQlZa+IwxNOQNGKh/Sh5plp+QqsZsbqyyM72Dutr6xwcHXHnwQMWFhaIakNqL0QDg94hSejx9KXzvPXuNQkkfZ+8KOj1peK70G0zGo9oxDEWMVSuqoLKFFTWcNTrMRqPybKcXu+IbqeDMSUbq4u0Gg3Sds5kMmE0mbC5sYHnaZqtFjtbu/RcM3PuQK0sExNrX2t8pYnDgGMba2zv79MfDLl27QZ5JhKwxhq6nQ55mpNmE/H1aLawUUUridncWMdqjQpkbNy+e5fbd+4RRD6tZoOjwx54ik6nQ+aCt0cPt7BW6MONRgNlhf43GAwZjAasriygtaXdDFldXuKFFz7GL/zir7B56hxBFJMWBffvbXH1zbe5+s7bHOzvSKCrDFBy/PgxvvKVr/OZT3+OTrvLtes3+M/+9n/JD370A4bjMUmzSbPdwdgSEdKpe4EleBNgtnoMHXXkCrTnFMechK6xUiXwAp9WO2FpfZFJWVHmBY8eXCfa3WXj+Cm09ohaLU6eP8Ott64Ktb+epwHqddc6byGYVnP8wKeRJLKWFiVlLWxT50FY8biZAp9mugbjKp+q7mepj4Vy6s9KEjQnCmDt/Jhya4Lz4FHMfi9zsJ0twRYw1VR2Xysp0Oj6PUpPUzcn6CcHrX8/B7KauoKr5JYaK0mDnbJENKUVeriAdOUUsK4TPjmAyFFHfkDSaJA1Mgb9A/74m3/E7//+79HuNHn++Rf47Oe+wMsffZnPfenLDHtHXL1ymVdffZV33r7M/sE+ZVkxGoxAW8IwIo5jms0uzWaX1bUNxuMBu/u70gOnNavLK2xuLHHj5j2Uo4M1nWR6WZZkRQHWJwhDWf+Uohu2GU8yjo6GaC9AIzLOxlRsb+0SRQGBFxBGAcZUU3N7rWA0GmOtppEkV
pbb7yB1opiXlA3JdY2bGxuUhrHZDzENhX9fh+tDNPJCMAXva1YXd/G1BWra2tkWcbmmbPk3R7zWcXe7WtYZ9nY2KLTX2V/d5e1J7fp9focFiNfXA0v9yo3ZYylbGpmxRznbFtl/kGU2I/aw7f11XWSJGI+n6M0dPNcojQGVlZWkArkmrqpSbMUv1hgnCOOEtI0o9PpCS01ycg7XaKqIk0SqqoiSTPvCrFkWSpCRM5hrWY2m4KV4r3OGZ83kdDUjlt37tHt5FgnCpdba5v0+2vs391nZWWd0d6QuqmlZoUHOP1BlzRNKcqSNE3Jk5y6NkRGogFpkqD7A/Juj/3DITs7O+g4ZXPjDL3eqpfVzWhMTdU0vP/uXarK+OhUJOuXUgQzR+NVn7DeCedarXXrI4vOOYwVg157mou1cHBwgMOxvrYGQFVXHixZsjQlXk0YTadUpdAvY59DUTeWra1N3n33Ct995XWakJ9RVcJYKEuJ8tQ18/kcnKOqG/Z273E0njCZTL1c8vH8SZRiNpmglG4jE3IfplVEc07x+uuv8ewzz7a5CDqKsN42CXK8hL5AjPDWAHcL+pgAB5Gzbe2BsE45fF7QglkRcolbTTNfil3hoxQ+ehdcHt7kFXSybHCEZ+hEaCL83bIyltdOb4fpAKpUyMcVW86YGqWsN3kVylmU9kJIiOGNBx0hEiDKsG0nCbDQmhCddm55P1etQ9O1nweng010nJhd++R9wrs+WihDUdbLONIYa6h8EWzbNL5IKEv2B8isX0S0BPR5G8Z5S8ot7zSS3YldcloqvPgLAjJUmBfSrJG+k/MrnAfo8giVfxoB9ITIp3/Kyjuelew7baFOP16scziDL5oqr4v94Udgm+8aHMiORc/5MedFAoItq5WS/FHg4x9/nmcuX+Z8nBGPJpSpFNq1aNJOlziKWF1bx9maPNX0uymj6QxnJNev1+1SWYPSEXUhqQG2gVgr0jiiWCrMGQWBg4dsjwRswFE3NXZmJMSZZCzLBbZt2aD2oTSRsZNF7da1a9y6uY8xEfNZCUcTkscTmfi+SFSYicFjsux19mdnMbQ55pEIn2uv2g9oG0JzMtR9EOd0UHTSYHiQkXXad36QQfYg4Kna+wsHBFDTXhUggKZpGuazGVopsk7nWFTlWKTjBDg4RuFbOqcKnahCrQ0/odzimOXrWiScLfpOKyWFsJQk7JWzKcOjI3Z39yjmBecuXuBjTz9BUVbUxpLEKZ0sanmTwcAHFupP91G2lgHaKU0pivmcyWgIDl555dv8k3/0T3j99e9RVzW1qTG2lrHoQ9oynhbPTf5JImtRFYzfmvH2+1f51//6t/iZv/TTvPjFzzOejsk7HV812JL3uhweHvo6BlG7sDwI5OhQLTpsPITxoVDKtZLWJ2W21bFBsgQAjj1Nec8tvdIKDyy/vzzGHwLQPAhQfeBSs+QNWjS9WLNPHv4AwHFynn3ARRL2kw8SQv4gQAMy/sbTGXfv3qKfJzRNTdPUqEgz2NhiPJnSVHPyPMNEirosSZLSF+U0jEeH9FfXqWZjNrfPsLq+QW91HR0n7O9dpZwNyfOcs+cfY3f/kKeeegqd5KRZZyHT6aDbkbwPkPVzNhFFqECjiHSY9390R8xHDbq9LkkkojUOR5pm4mF2diF1qhXW5u0Y1ZEAocFgFeskATzPckl+NobGWkbjBo3CNjUgtBtrDaap6HYy6qZhVs5QONI4Zf/oiNW1NZrGMZrsU5ua3f0ZF8+dJYs7NI3IuK6vr6NVwqxq0FpTN4Y0FbGCxCtdZmmK0oo4SiQHcA7FcESaduivbJB1enzhCz9AkmSUZcN0Vkrit6lxrvZLjCKKc/qpz7/REMVSEDBEV5yDqixbJSpxPhrqRvLNnLWLPJnGYI3UkyuLiiRN6PY6lGVBWNuVigjroHOKQbeL7veP7WUKuHt7n3t3fhM0xF5GFqUwTdM6nCLlK9CH/R/Fxz72LMPhEe+++y5VXfu1xbaGqlDDijYfYBlo4PNAD4cT3nn3PZ5++inwqlN4sGpYoni7gCNC4Usxfe2ySW4dQguiBVfB8RqADSDBCyd7g9JqUU9uaUtYtmXaPA4ldZkI+6b25/cOYm8ze2ehaddQSaG3fo0J+yNo3YCx3jFoCHEYAujwqmdKiZJXkiRtVChKYpJI5LZ3d+9545/WjG77RDmsA6VknVtIGQeD/7j53SYnKM9w8eILIPSnMGhkb5CaOsZzwxY2qlpyfoVIkQNfWyfYJz4g0l5Hmz7QPo8AkGz77MOehBOIEi7c4YFGW7+H1pZ2zoHFS0rLMwj06nbFd14CGtc+X+Udpsqp8Ehadj5KCpi2qTeR8raBkwiYAyWi3q1N7dGM2GnWUpuatfV1nnnmGda7fVZLQ1LMcNpxcDhHRxH9QZ+VQZ9Br8PaIGE+n5Ammxzu72MNrK8NmJcFSZagjWI0nMkcslai40pJrTAPQmVenHBUfkh7JGATlJaMMcznU+q6blW1lo1DF9C1PKGlRUmxsrqGcoqrN27yiWefYloNmYxGfOfbr/Hss0+xsjI4hpDlYcqmEJwhziNO5ZG1RHEezpO9AEaLtgxwFvfwwdGc5dce9Pqx61ka9Kdfl9xvmO3Ov6jAAwahm5XzgiTN6PZ6y5++D9Cc+h3txS5szvuuO0y4cGjbv54eo3zo2Ao3eDqZcLC3z73bd7l18xa3rt9j994B49EU0GzvnOXy42f5F//i3/Bjf+Y/4E/9yBfp+ErACkkeNMa2i5SznrIQxdRN8cD+Wm7aSxCOj6TmwNHBAf/D3/+H/Pvf+n0mszmNqTBWqES29ebTLuKLhYr2PpWRe9VaUyYJb7xb8Df/5t/mx3/sDX76L/4MzimyPCWOY4yzrKytcbC3x8bGJiFcfVqz1nrPpxTGUj6CVVW10J2sr88TxV7ucSHleNoTDdvKsXs4tgCEh318Tp7WPohq+UG0sZOff1ALI9spt5Qzd/r5/yiOhfaZnrgM59ypVLvTrlfWAsvtO7uUswkr3S5lXWB8YcN8sMLu/hhrGiKtmFUlcZLQ6fSZTI5AWUbDAwYOymJO4xxJ3iHSCfNZwbV3vkcUKR578lmKyrC9scHFi4+zezAiyVJvP4mU5/rKKsYKxdLWhnfefhtrG7RP9I7jmHaNdMGB9PBFzD5q0u7cvkue5eActWnQijbCYA0kaYyodkpOXVEUzOYFk8mUXq8LiIJj5A3OqqwkiqJEJDmOAq9d+QLUoi6Gc8zKuacaaWbFHNN0aOpSJJoLRTFz7N4ds7ayymR8xOHRAWc2t8lSmBRSzyJqDI1xoC0xiiSKyDsdrFU0FnSk2exvsHPxaXScUsxLZrOSsihwrpYk6Sil1+mTZqmoZyUJebdDhPbrUykqVvMZTSO5HXUtogBlUVCWZWssys2K41ApUTpNU4l0NT7fxK3IYcFB5tp562VoW+eaa42s+x2Juq1GX9WiGCUgE5xVNE64/dY5mroiSRLquqLT6fD0U0+zt7/v60rNvTgPkiCuFCqS8gwCSJc2TKtojGVvb58ojnjs8cfkO
9qgyIkovDcM5Vfr79HHUuySt1+p1mZX3qBcuHBtS7MOYKhtQRHWOxxbl46ThH3NQvhAKcB4/78H48qD7TTJ6HZXRNggTdBRQidLybMOURJJBM3nmqZp1jrLzu6coZN36XY6pFmHNM2IkoQs65AkEd00Ik00kRanX6Q1v/vVf8/f/wf/PXVjUC5ExiLP9nL+HvSSHemWU2fb3S2s9zhaB6nFiXyxC/kwzkdVfJ6MU2inls7qzxaoZkrGiwgXeMDTAp+Q+6JaO0kvMJQ8tzan2dc0CpdoLcaDX41XKUNhMZ6CH2SgdXtv1tcNCvYXILLI1khUyi1RFRdPHuW0AMxQq8g7qbULwC1qgY4LK5MYmh4UudZGCKNK8oPg3PlzXLh4UdaEvQMylVDHECcRt2/tkqx0SOOI5568xHDvFjtn14n1pqj3Pf0EV27c5M7hIU89/RRNOeW777zHW7OCaSXU1CxLSKKEyXQCPj3Bd6CAn4dsjxaxcYtJq5CcG63rJU93eOBhpOA9Xpos77C1fY6f+JmfZffWbb7+1d/nmScfp9uBXq757qvf4l//8q+xtrnG8x9/no89+zF2zu5IlWSPSF3wULShM+uR6SJJ6rQF8L4WRhvHJ0qLvk81eO4HDQ8DfNo8muUv4qRR5s1TtRiiqEXYsSxLiqIgy1JW19Z82Pn4NT6atzYY9qqNUjsPqgJ40d6T1VRS4Oro4Ii7d+9x5+Ytbt64w+69A6bDCaU1pHmXlZVVdnZ2eOzJZ/i+L22xvb3J2lqP2eiIb738dc6d2+Ctt6/yH/749xNHfqI6UEYWG9MmyckAquvq1ChB8IJYa4VOYCymqZlNJ1jneO1br/B3/84vcOvmHWpT41SDimSyx2kskZBG1EuMsa3XpY2YIItYGE82VHs3DcZU/Mt/+a84OjrkP/lP/wpqYxMyJ2oe1rK6ts7R4SGr62sPHEPWOqJEobyU4XQ2Y39/nziOW6WgKI6IvMfig5/i4rz+gR57LYT274/UfTh4eegoyYnPf/BnBLSedkSLyR5wHR8ErIJXLKxP951ZHZ98y+c7NQrlNFevXMPZkqqKWs9zlmXEUYdifo/xcMjo8B62adja3qbT7VKUM6ElFhXDgz3iJMHWDVUxp9df48o7bzKfHnLh8hPoqIMppzz76U/htCQnx0narp9aKdY3Nnx+jeOt732P2zeusbK9hrfg2ucb7inc10eRm0drX/39bxFHwTOtCG5ZK/tp28S2dYuBisa5fRZzzC2cQCx5+8P7CkT1ybMH/Enbp+Uc4+Hekq0l4GB3NmX3ztSvVTA5uovkU4ByJVHk+L2vfZMo1gx6Xc6fO8Ozz3+SMxcvY62oiR6OpwwnNUobkjih219ldX3LF2+OvLRt5MFXQ1lJrshoLDVYmloS2uumljXXeqPJAc5IjQ20RMK9ESL3GSiTQRLfm6/BcGr3Mll3A50mGKKtv8m1trT0ja/hgv/4sZnvvIHrfO6FcwyHQ1ZXVrxilyJOEs7u7HDmzA5VJYVFD/b3RWxBh9M4jnlEFBIJcpraGO7e2ZWcpPV1b3h7ozCsccotnK9IvT4ApWTPCxnEC3qup6sFqf52TwxHqiXqsFta70ReWihXS32ivE1jJNdLckl0uDO0gihL+DM/9ZP8yA//KJur6yRJQhxLpKTxYCiOFLGyfi8UY3teVoymc7Y3z3g7DHAiltAYhzEOZxpMVTCfzanqEcPhiJe/8Q1++7d/RxRJ/cwgGO7e+e1QPmIjkZvgm3PLj8Mt20gCJKyXSVZLgAjwUTHXSh67gCDDQ/W/LgBjuxPRNHbpL7lir5FGrCJPInTt0gG+Y0NakvJyyUr5HLTFWuEc1I0lThMPxl0rQEAbQWIBNvCQ2DvyTQAgNtguePsX8NTJ0K8y/8I1HkeIGu37dfEdbfRKIUIMOuLy5Ut0+wPm84JcxWwlOYNeRhJHNM5Q3JnSX8npJBG3r7/HIIHRXREBcQoOh2OshclwyJvTCVvbGzxx/izD0ZR3RncwjWHQ7zAvCqnzp3Tb53meMZvNedj2SMBmpd+XUCa0QEYrKZaotSbSUvQxSRLSPPfyhSk6iljd3OFzX/4KncEakY648v77/Mq//FXOnb/E6mofF+XsXH6Cg8MRv/4bX+OX/8W/Jc9jLlzY5umPPclTTz/OhYtn6Q8GbRjcLlBUi8blmdwPMpabc84P8ODtgDC/ZBB8cD98EMi5j0YTUDQLA2zZ6JTmw6V+woW+nc1mlMWcbrfH2poYy2apIvxJEPVQBo1ybahW+MCiElKWc0bDEbt3d7lz4zbXrt7gzq077O8Nmc5rkiihv7bOzvkdzp2/yMc+8WlWVvtkeYaKNNY1zEZTlGvY3OiyshpzeHiH99+9wmBjk7XGsLq5InV2lGqT82SxdG3OgINWnjKEoa2xXsbS80jBc12l2Op4NqeuGn7pf/lFfukX/1df68OgtNQ56GQduh1RiOt0cuIkpihKEQWYz5nPC4bDI+ZzkfbUS7rzoY+Xudi/87t/gFKOn/0rf4XVjW1AaEFaK7r9HkeHh6yvreOjwThjqEpR9un2em1dHh1FZHmOMY2XloxkQnuP/cmx1T5rgmdr8ez9IDv+qD9sIH9A+6MYx8vg/v7PO48xjlPw5APhiOO0wNOcBQ+8p3aTD/9fmg/BIXKi3X8uMTiNs9y8dp000ZhGkva1grzTo7aGcj7DKeh2e5jGcHR4BAq63T7GNGgt1LU0y2nqmvFwRNPA7Wvvsrq6wdbOZXbv7fL8c8/Q7fSojSHSEZFOkJoEDpxmZWMD28B333iDr3/999g4c8bLjzZeaSlEOt3Dz/+P2n1N+fKT4S+h5AaOfphv6rhh4Pz48X8HinPrZ1cezqh2cLfjMMj9h6cV5PJlWZaE35bbTpgDi7kTohwaRZplbG1u8Owzz/KpT36a/uo6RVFyOByyezDDWkeWZQxWNljflEhM7DnrSuGdZnPGR2OKomA+F2nfpqmxft1qfFHYcC3We7CDM2wRIl04OAN4E0Pd4Zw3ZINylDfaQr7BMceEP9+Sm09+d8H7LKBFhfQQ51i2SZ31hC/nfdbOUtcGYyTZPqCDdgfWisHKgP5gwHw2ZzabMZ1NRXbW04G1NyZxYkTHiPd+b/+A/uqqBzBy3iiANr2I2oTcC6clQi/P1NPgwlgL1+NXSKXwkr6ufdf6+yUMRQBl2nUz1hEGIS85rz6kjz2bdqChlOMv/YWf4XOf/zzldMKt8chL1gt4rcuCeSGyzyL1XLYKo+OpqLT1Oh2m05nIkDcNTV3TVBW1tb4uSUNjvTOxEWdiGN+0oE9uPNSPcW4x/gMgcH4uts9fLe0hC0wkE8XixQqcB7nKz53g1NI4rBj0YesMy64C7aNAhOcexkpwcjgPFAl+jjAQWQAmH7lxePEASTduE/AloiaAMNLy5Uo7Git2rGDoxfoCCyaUrDsaFQQCdMhgCoBFobS3hz3VTPmFTGnnL03ynn0NUVASdXSIkhyI4EJTSuHhlZUuxayg21+h0+2yFqXs5F20s5RlwaiYeDCiGPR6
DA8PoZOIzLVTTOdzjHNMRhMZB1XDd7/7No3WTMqGSEcUpmI0GpOlKSsrA9YGAw4PD1kZrFEVFarbA/Z4mPZIwOZP//CXPGdSihe1i5sfoKaRJEEQJCoLmmWwusYTn/wsSdZhPp1RVQ1PPPkY3/vu94jjDvOiwRIzLSvmVhH5QlnOGW7dm3L12h/y67/6O8QRrKwM2Dm/xWOPX+LyY5c4d26b1dVV8jxtk/kC7S0smmGyyO/eUA0TKgD24BnwEyl4W5YBy8MaiscAB4vF2L/rz7UMbPzW6SfmdDqlLAp6/QEbm1vt959sx2g4x95f4pwG1RMcthFd8dHhIXdu3+Pm1Rtcv36Lmzduc3hwxLww6DhjdWWFte1tLjz+CV78wXOsbazS72Y4ZxiOxwyHQ+4d7HLr7j06+QZpFLGxmfLEY+fZ2lrx1YKH4BTPfvxZZpMxb7/7Hp988RI4qcrbNAapt2ZxTnmJUEtjGsqilPySUBDLb5RRFOGc9vJ/AmZHwyGT8Zh/+Hd/gX//W1+lcSCQ15LlGdtntkjiiDSTGgNNIwAjTiLquiZJY3r9Lptb69RVw/7BAQcHh7LQLuWOOScTXaJFhq/+7h+Sd/v8zF/+WRwD8ixHaVoxhNu3b3tPUiT1FvIOnV4PiyOKtBfY0PR6PcpyThzF6CXq2Wlj6dizf4ix9yjvnRxD/1uAIqfCZk7rjVJhoH7IOT6UirYUlQlzTqJu8srJeXw/+JLVXWgrhju3bqIUYtT5vJesI7lUd6+/J4WEsy5RYomSDtY2rcGXJBlRlGAa4aG///Z3aZqKONI884kXuLd3yLmdbS5cuOBpmI44inx/aDCWOE/pr2/yh1//Br/9m7/K2vqa5At46UvnE59bu7n1gH8Ebh69LZuVIUlc3mnBtl+bW28rtN5j1Z5lYZYKx961B4WIsFBZFmGgNttCLX7Xkf9eJzQypRbjP9Ki3Hjh4kU+/ZkX+MQnPsFgsMrhcMztO3vs3bpHnmcM1tY5k3d81fIYaxqKsmI8GbE/njKdSiSmrkps01A3jRRntrK+hTIL1hovVeuj6Up7w0mJped8AjO0ldGdg7qq5XwEuriT3DCtMY3kIFlnfQK8JAgDi+ir71gx5lQLDjQ+X8VKJMPhcCYoPPk+loQUFKKsVHuaWVXXi4hGQE3+O8OXZllOnuesra1hjKEoS8qykDyiypeeWHKhZHmGtWaR/h9YF0uRu1b0KBZKvVPKV3tHaEOEsQVLQwasIQKcjrxwgORILEe7gmpqMKRRSmrEhOLkkX9MVuoItXYOAtD++T/7JX75n/8Lb8NZj1O9/eQdJst5JCG/bz6f45Si28lpEdjy+qtj8HkmVoOyHrB7hKJ8rg94eldA92ohsBP6tK1fEtbwcBMhaqPat/z5aIFz+HywRcU57BXPbLAUFlFCmdcyvnQYFirkeYR1Qexe7cQJFvYarWnHhlr6v7NBICCkT8gaEwQnCN98zG6U+wrTLNip1jn/LH00x8tsh2KcEnjwQCZ0UYBTntEkRUxj+brYv94+YwuxxU5mbDeW81mXSMG8LPjedEp/Y4OeUqi6ZlxP0Nb4tAQpSF0Vc+JIU5Y1w6YkUop5UZJkYlc642A2FconEUeHI3QqxeWbpiHPcoqi4PFLFzF1Sb9zljRJuX79BrPp/2YRmw7dPJekRB15Tq3n72ntZSPFG22NZ/MlOdsXnmBOik46lONDDo+GDFZX+cIXPs+sUagopqksZdUwm8tCq726hopjkt6ApD8AY5g2hjffvs3rr7+HrRuiyNDtddjcXOf8xTOcv3iB8+fOsn1mi9W1FfI8F1qPn6i4BeVp2TvU0ujcYoFUBHDjB1vwFJwAOSe9y8febxeKMG7NAkyFIey5rpPJmGJaMFhZYbB9Rhb1ZVWUB0WFlGp13sFh6prJdMbR4RG7d+9x+8Ytbt24za1b9zg6GFNXBh3lrAxW2djZ5ImPv8j37Wyzc3abjc1VVlZ7JKniYPceV967xr3b17jZiByo8RSqPOuwutrlictP8IlnL6KjmsODA0bDAzq9Ptvb2+zv7zEaD3ntte9w6/YhTz/9OEXRtNGZxjQ0TU1dNy29QSklRn8sIKYNx7vj+QPOOcajEePhkL/9t/4m33z5NRpfVybLc86eO8vKSo8ojsiylPlsLhEipSlr0fwXSpoYo9ZY4jhhe3ub1ZU19vf3GQ6HbfQkPNemacA5hsMRX/3q77O9tcmP/7k/R1lXZKnUukmShJ2zO+Hxt+PB4iM1OgAbRbfb5WgY3QdqPowq9kH5MH/c9kcB8A9zrBRFi/wL7Rvy0y1TAk/7nuPzLeyKirD6L9Fh/WanPc954Wm/30ERBBoCRcZay3RWcnCwjzEVxso6F2vIs4zr776DqQo6/U0ms5ks0FHM6toZOt0OjgbbWA727jA6GnK4d49iNqbXH/Ds85/GuIjN1QGf+fSnUVGM0obGeuNQKR/lkbolv/hP/yk3bt+lm2cLd2GYC2aR6Bvui6XfPwI4j9I8RaQdFkvRfD/22iGk3BJ9Vz4bPrhQSFIop5e80l4EZRlgc2xEt4aMQgwPVODoS4G8bq/HxQsX+OQnP8XHP/UZds7ucDQccXd3n1v7Yzp5zoWL5+n1RGGymBeMxiOOjg4ZHh1JkcWyojGN5PLVDXVTYZp6yQHojZ4oQscKawymEaZLUzdL1+wNTyWUIeeO96HyBaR9CRNwrhUfqCovhuGWqWRSWwUn+RFtToELKlW+yKNpliiYy+N8ATTc0noiyfBQzkuSNPUSy06Mv7DfLwNX/KNU4kWP4ogky1jRKx6vKB9BMW19E/Guh+9XbX7H8vqmtfb0IX+Ur89xbFVbTGRv/Cuc9vkFNgCSpWhPO3LERsAuOWv8OBQnrZaIVstwCPkX3mZrrBSBkVcEuCq5xsjalqGDUhD5kaoUSZqQhbqDiDBDFEeL0gke1LVugNDX7jhYcc62QMbhAYWSPO5AyzIurNMCYK1xNMYQItXWC6y4drws/fM2XxhLy5MvSVPiJPFz3Yij3okCpURSNCoK41FUAdES0bEWmiWhiEgtWBZLy0j7e3gegcLpjU90tKhrp3y/WdWuCGKf4NcWz41VIRLjwj7ovEIaLASnwrVI2C6KIl+Y3a8vfr0JZE8XqLHGEY0mPE7E450+uU8em2m44WYURcl4PGE8nbPnQa7SmkQ3DOczNnoaHSt63YzIimR6rysCJ6aq0F41sW4aulnKCx9/lnv7QyZlKfYTkMYaWxbgLHVVUxUFeZYync142PZIwCZPEvIsRSEPKNBvYh2MstgDHT95dURv8yKlzahsSm5lIdzc3KKTSMXfg919du98h3lZQ5Sgsz5pmgvf3GivkmZbyUVrvJxfnBLFKcpBUStu3Blz7dYh5quvYU0jRmMnZXNtwMbWBts725w9v8nW9hm2NjfoDQZ0eh0SX122NYDC4LeLhVK5xcB7ELiR99WxRVcGVli+ZDC3hIegqKE1k/GE6WjMYHWVrZ2zOCc5HceM1wC22qT7hqqomIxGHOztc/vmHW7euMXdW/f
Y2z9iOitwNmLQGzDY3GBje5OPv/QxNre22NxcYbAyIOskKC35LOW8YDYZc+fGFW5ft1jbMB6NaVzMfF4wHk+oakOv32O10yPNEp577gmee0qiMO+/fwtjLDs7mzSmZm9vj9lsyrWr1/n611/jhS/8IL1uxmgyoa4ruS+tiXREnKRY6nYRWC5geZon2jnHdDplNp3y//5v/ju+9c3vYBUkacSZM+e5eOkCUSwCBFVVMp/PGQ5HDEcjqqpu+e3HgWIwZGTB39raotPpsLu7e+yZg6i+KSru7e7yW7/9O5y7eJ6XPv9FTNMIT905li63fYYB0IRojVKaOBaFofuUBT+kPSqgORmNeRDN649jEH8Y8FLB67WwCh8I0O7/eyGiELxcbfTHWWgLjkEUe8pJez8B7Mg5jTE0TeMTme3SOQHnmIxFYjZWCmtqlGtQSuQmXVOzdfYcWZbTWRV53LquaSrD7p1bzCYjXnjhJXCWqiiZzyfknQ6f/NRnaeKcK29+hx/7sR+nM1gVeUtKIu0IxH7jC8tZZ7l14zqPPf4kK1tnuHXjmmziLcg33pt6P3D7CNg8WtMYoW4E0o5fsr0JRsiViXUw2F1bKbuqK5w3goLxEowRIcHIPA8UNeX/F5TUtBb1qAV4UkLX1RGdPOfS5Ut86lOf5JOf/hSbG1uUjWP/8Ijbuwesra3x7PPbRDqmrkom4zFlWXLt2k1mswlVWVIUM5Hk9RE+2zQ4rxwWrsc5hzGGKI5aXnskiQHoNMI03sPsK6+3ao1LzgPJNxIQorUiSSIg8WpHjjiJfH2QmhDFkcRsES+WOajbwtzWG2imjYTalnrZrh2IUypQe+SaFl58b2FTG0MeecNTI6BEe2qOC2NAer9VkHIWpzXKg1ilwOmwjkfts5JzLMdwVCvnfRy6BsMef80nHBDOX5daHoPecG0xkULHMQ67cGSGtdTTjXSwMDxLQ/KePOBcuhgdDPETjl1YrMlCmxMl3DiOvQ0mYKGq64VTCNpi6pLgvZAEdsa0r1trcAYfbTY+ud+0dPPQjM/RMh41h2cu1EPXKsKpJblc12K/hYNLK0ea5f7aLU3T0Ol0BTw7S1GWZHneRi8CFFAIuLFOkwD4MS7y7uFYuQZx1kk/WqsWEuBKnlfL2PH5uuDauWONk7nuwp4YHCq2vZ4F2Tj8P2AiD3TaRcUFz4vvC4nEaOXTRJJFoXPnRF3RGpHsDsBUG0t3OufjacpZrYnMQk48do6OUpTFnEI5sjynyVKMz5WiMuyNxmT9mCS2xLEmMjFZktDYBjMrOBqO6XY7dLuOYjimqmru3dmlcYqdtQ0KZ3n3yjXiOGY0mdDv9RgOx6RpSlMbdra3eefKAQ/THgnYZGlGlqQ45+SDDqJIH6MMgYTY4khjdcLBqCRb28RUglTzTJNGMW+/8T2+/c1vc/vGNaIoxjnDfDoSqbc4Jcl7ZN0V8t6AbncgtI84Ax3JZmF9CDMsDMoP6jhBx4L8Zg3MdyfcuDtEffc9lBJ1kTjWRHFMtxcz6K+wvbnGxvYmW1ubbG5vsLq2zmClT7fXJUvTVp1Kwpt+cC4N0BCVWRhjCyNZBmNQK3FtQp/WmslkzNHhiP5glTPnz8v7rvYRHOFtmqZhOplxdHTIvTu73Lh+m9vXb3Hr9j1GhxNqY0mTjNWNDbbPbnPxyWf55EsDVlYH6CQGZ5iXFWneZXV1hdlkwv7Bba5deZPxdC7VjmclVVFRlYbeoMfG1ipKK4qiYDgcMy9r0jRhZXWVvJMymc5QOubd965xcG+fNMlI05hz57aYTmaMJyMmnrL25ptvM55Yvv8HPk9VlrLhqTxsASilqeuaupXePA5k7gOKSq6rqgr+h7/3C7z88isQKTY3Nrhw8TxntnewTryLo+GIO3fucnBwKKo9xtAYI1K5SxGSqpLifJFXLwqa+lmWcenSJXZ3d6XGxZIB3FiDqkree/8qv/XvfofLjz/B1s5ZdHQ/QDkeSVgy/nFopUnTrL3/R2kflNPyoHOdlp/1MJ971HY66BeZ3NYDeOL9oABzf06NIijIhbdawwVQzicaKknrjJ3CBUPLr+RNY1r1pqYRalmImgVQoTxKODg8oK4m6Lj2kRELLiJJM9I8pSln3N3fZT4dkaU5585fwBnLfDZmdWVA3ZTeI2lJ4oTLjz1B3Bnw6tf/gHJ6yPUr73D24uOga2prUZGVcdNuTLKWXHrsMT73/V/mrbfeW3C9gzEUPNUusOsXPpmP2qO1L3zuE1R1TV015JnfEpWj18mxjUFHmtl8hgWyLKMxDUkSkSUZzslaMpsX1I1jOi88UJDnE6GIY0W332FtdQVjLaPxjMlsRprG4BxRJM4j0xh0lNEfrPPEU8/w6c98hsuXH0PpmPF4zGhSknW6XL70mI9+VIzHE+7du8fu3XsU8xkXzp/l8Utn+e3f+fccHh5Q1zXezGnlixV4cZKIKEqIY1FrQ8kxIZISZIKNsj7S7MGH96g7Z3HGiDiLkto0TSMRobC2RVEESjzsdd0sgJ9znoYW6E1a6rMYibLYUCQxFDMEvz96wOBfjIIFqDRgfa7N8RyVZcqPc+CMwzR1O19CQexIRZ7uFuaUp3RZlhKhxJgNYBdjCXKt4vyQej6EqIyjdWYplvJjfYQkRAP8VCYOMlvBUF+6jpbSHqRv/dwPdf+MNVIU01P1jGlakCD37gGDMW0EzhpRqcP53FZjqP0eGZ5TMN6VYlE/SwcAIbaNlCcIDmDdAl6tNU1dS+FWKxEzYWX4h+qfVyg43N6zC8DVtYCDVnnOR/+tbp0QLQXM0dZaMs7S1DO0lsK0URT5yITFOEs5m1OVFWmW0clzD2AcSkVoHdEYqQsUE4tKnnU0zs8TrwSsnUNqykSeEWQIOTAuRGc0hNBmSz1zUrw2SSQY0N6zjzIFQBPog9LvEGxJp+Rf24d+sC9HjRSaONYSLfVzNlDncD4PLSSq4cBZdnTEpgd+zjv+dBKD0sQ6Yn80oqpK0nSGUoo4TsiSmKqYMbcGtKaTp8RaihHbOJYi09ZRW8va2jr37t2V6LGVPO84S7l15x7zRpxERVFRVRUH+wcoFUn9K6UoiuqDF/Kl9kjAJokjIqUXWuX40J+WTou8ByqKYpIkZmY0K1vnGc0NQvZsGO/f4bf/7b/jnbffJu90eOELX+LMuctcv3KVOzfe4fqVtzB1SVGMGR/d9Q9Xo3REnOakeY847dHJ+ySdHlnaIYoyMZjUYklyfqFwfnBoJ2tTlqS4SFEay/Rgzr3dKe++d6PN2wDxpCWJJs8yBoMug9UOa6trrK2vsbY+YH19jZXVVQYrA3r9Hp1OlzQT0YQoXIcHMItFSgaP0oqqmHH79h0Gg1UuXH5MvLtlwWQ84mBvjzs373Dz6nVuXrvNvdt7jEZTjNV0en02z2yzc36HT730OP2VFfqrXdI0oigLRqMjCdWrhru7d5jPCqZT+Tcv63ahaL012nOl0VinWNla5+LFbVHFKUoOj0aMxnMslvG05PBoBkryq9ZWpxRnNrAWNtci1ro9prMJxWzK8O
iQopjz7jvv8tor7/BzP//zPHbpLE1tqESBU2QSUS2okWd2f4TmpMFe1RVNXfEvf+mX+J2v/h6dQZ/VtT6bmxuc2dkhyzoMh4fcvHmTO7fvUBSlbLQ+6pbnOVpr4Ub7QlFJsnh2IUESFsb55uYmBwcHzGazY9dTNw1lVfHtV17l6d/9Xf6jn/xJX3T0uPjAMqg59g+hwCVRTF3VC57xUluOKj0oB+ZB+Vf3A4Q/XvujRnTuuw61MMaX39c+CTLwg5UOqZZhA1W0sphLwMZ6b5kzCqsiYmWwrmI+nzOdTJkXhWyqqHbst9W2Q3VuFcQ9LLu7e7imxjjPwHYOrBgBB3t71LMRVVXgMF59xpF3u0RRTN7pcOPWTdbW18k6XfrdHucvP8Wrr77CbLyLVvDat7/JJ174LOlgk7hJfPV3WnqZdZZzZ8/x0ue/RJ71MLXxNQdkExUHoT2uFOXa/33UHrGdP3+BfreDVlLzIwytuqrBieqh0FIADU1t0cqRJDEKTWMbecOJQVM3DdY4sixuc22Ust5zLGC7aQxOSQ2KKNaUZUUUpZy79DxPPvVx8k6fopgxHM9I05RefwWlNNPplPfeeZvrN27y/pUr3Lp1G9tUFLMpGxvrvPZawt27t5iMx77YZxD20cRRTJaldDtdkjQmz7vkWUqv20NHHTEGrWSuGOOdAVVBrGKUcS3oCNFnSca3Ytw72shO8Og7RysshHOUVdU6KoJ3XauoNU5tIxEA63Nwlv2ETV21+QSSa4kYm1owRBRFKJ3inOX27TuURenxjqKsa5paWByRFpVWHWkiJQ7O5ciTUnLOKOyPSvuE+koS6qvK5+1IwfE0STi7cxYdx97jbohiiZy41ou+WDuVwufJicfeWPF4B2niwlMDrTEt0DNNI0n4IQfK59jVtS8yasShY4KUtoNArVUqJIUvsQ4UC1Gw5TUEMVesCQ6zRQK+0hL9cX7t1lEE1vi6OsGJ5IGIsjgX1P+WZQVVq7hFiN4HCWylfSFLR6CByvW59js9tF3aW5f61YNdKwOUEDGTCA/gHVTTqZFrR1R9XVGQVTnKOdJcciMluU3U76yxIgLhnfeh0l/4LqeUL08R9nxZo6035qPIq4+gUVrATRTGhjPEUdayjA0hIqna5+KUp7lpt6CL+VgOup1J7ZySbpVcruCsddZRmYYoUm2hVXEayCe18lp5MVx3BePhjPU4IhvP6XQyEmdwScy8KRnXNSt6FesjcmmSoLsdamMYrG7goh5lMePO7i6bG6tUpubwaEi/v0I1GjGcjDFG7qMqKvbv7eGiiPXNdfI4Y1ZWaEQEY2VllbKsOBweEWh5D9seTe5ZKVQk9KFOt0OaZfR6PTq9Ht1el36/73XOUyJt+fbr73Ht0JHGCVurOV/7rX/NV3/nq5B2+IE/9eNs7lzAqYgIMDrmzIWnSdKUK+++Qd1UtM4MZ3HGURdTmmIq3vjWS6PRUUqSdIiznCTJyfMeSZaTZDlRlKCiyGu3Q11VrdcF12baEEKxstha6gpm9ZzhrCTdHxPrfXChAJlMWK0gjjRplpHlsmlkWc6g36PX69Af9NtiRb1+hzzLGQ0PeOett3nh+76Pd777Freu3eTunbvcvnmbvf0jnI1YW99gc2OTjZ0dnv/CcwzWVkizhH4vAQzz2YSiKGnMhL29EdOJFBTr9Xt0ujnzuqauDU5p+oM+jbUcjWc+edLzQxvjE57lOI1jY3sNpRpmkxG3b93l5p17OGI/ITSNtURKvHvDgz2q+ZxBv8/ZszFJGmFtw/7eHlVVcufObb7xzdd45pMv8uXvfwlTm9Ydo73HoKqlUvSDjOVjhrz/uy5Kvv3yy/zSL/4LlI7o9Ts89/yzrK+tUxQF+7t7vPnWm4xGI+bzOVprkiRuKUeihmaZTKbiWfVE8CjSdPKcTrdL9P9j70+fLcvO807st9aeznTPHXPOrMqaUIVCFWaAJAiQIASCg9jNNtVSU5ZaoQ637bAj7K+2I+x/wh/8xR0tD225OzzI6pYlkYQpkuIMYipUFXKqnDPvfO+Z97jW8od3rX3OzaoCqqJDn4SNKGTmHfbZe+2113rf533e54kixuOxVBg8inPu3DmOjo6Yz+dnaGN1XTMajfmbv/kun/nMZ3nhpVeEg/9MMB/+ExRMzDcDItY0zYcmHh9EeXx2bP77HD8tQfpJP/9xfm5JD1gmNm1PjU80WlQ5oIE60EyWm5hSAVFf7sbaI1qNqVjMFpyORsxnMyoji2/wa5Dfcy3gG+ZUKxnvYZGTo2O0liQnoJ/EjqYJSO/S/6AxDU8eP+DKcy9x/sJlur2MxeER8+mEjY1trGm4c+smJ/sP8AsMs8WMH/zVn/H13/gdTJpQVkIvwTm0hosXL/Lpz36RtNMDVOv54wgOGCFYOJsguhZA+dnxcY4//MM/9tUIPzHc8l0ISWNbOVASQsRx7IMwoahAmPMqxGyey+4VmozFi6FJoqM13bUOn3j9U3zui7/A5asvsnPuHFXpyGcF02bhARLL/v4hT57scnxyzHw2AxxrgzWev3qVV156gU6WoDFcf+FFxrMF5UwoaXlZMF8smM3nLBY5u0+fsvtkl6zTwVYVxjiqMqde5GRpyvrOFhtbQ6bTGQtT4EyDdo7aWFwjAbTSS4S+qcVEOUjvt03OSuhC1knVAEBHMYHyJuCY9DZYHM5YjKlxAbk3oUrjgQtjOb+zwz/6x/+Itc0tjg+OSLOEl155mTjJiOMEZSwPHj3kT//4j/mLP/9zIPRkKsbTGZvrG1IVDc9WCdqunqmwO+fIC/GLmxc5VVVLtb+p/HolfRBaK7Ksy8ZwHdOIapi1QusWRTnbrhOhytHGHH7fcZ5ap4PRbnh/1bI6Y1eqLM456iZUkH2FZkVNNPRlhPuAUF3ShGpXS6uWH8Kvwu264Xw/j1UK1Up1K1FAb00SA+XSthWQMK4qGJV613gpBPgG95BvrI63fKg8h/D+hcMjDBbpa1EQBN3a96yVD8e1e4oL1xLeSeXZPRasa8A0Z/zhlBKgIbagtDS1axxWKdBQNzVJkvrWCwHdvC0mGk2cSHKMF7HQQRBC65bGFm7WOcC4dv9SSgnm7xxxMHv1Y+a8kahRTUiFvPx12KmUl/mW8VDOLc3so8gn5sIuSeII46mE1se9oV8qjKMxNWWsmGYR7y0WJAmk1ER1TWQjCq2pm5qjwyOZAyjfb6Yxpqbf7xEZwyA2DOOYKBY/Gh2LGuO1q1coioqyqul2u9gGup0ux/M55cEB587v8NL153j86CmLRc5JWTEcrnH18kVcY0WQ6iMeHyux+YWv/zLnd7aJk4Q4ltKvHCED9aUtHKPTCUenFWnSJ1M5//Kf/lPe+tG7vPbFX+DTX/4KZR3cmBXj0xFKKaI05tzlF+gNNrj5zvco84kkL63EtJ/vLpSqG9Y3NtjcvkhTW/L5lGI+pZwdCcrSNGIM1BFeZZr0SNIOSSaZuY5ilPZOx61pk2sTHPxn5gqSKKbT6ZBlqc+CZWFCRzRO0ZSW6WKONRNCs6MLEsHWoHxvUJTE6
ZW/X4Kaxy96ZcRaG5bxhnHpXr1N6BcAGXViGgPQFcTXhQ69xof49azq+kSuX7YjUQBD6QkesQW2gJlA9ChexJu2AnUIpAC4TY91/6KN1DmSsLeJZZXO8kXuFeWCMPj6YEL9YjS5S3zrf5V370xh1yRIMKzxZoXJuCexhlZYjdO2vEj3sck5MTBFFIt9vhD//g37O5s8tEt8Ps7DTzM9NMT00SpSnFQZ/dvWukaYs8O8HSkeN881d/lU6nzcHeHqYqOX78JA88VHL82FGqsmJ+aYFUj+hv36SXj/hgY4Nryze4emOF1Y1NdnY2UWHKiVMnWVpc5Fj7CAvzc+5JK2xVgW6YQX1Y5qF71ifkfA+78n1L7jneHiyqZuC83+ci+TG/kjrRqbXi8597kiNL01BN0kpTwDLKM6w1hEFIkZcYK1DvUIdC0mLHoJPGJzEsWlnyqgQSPunxqQKb0ljKwZBhVZJnBf1BxmiUg6OhNAYMAdZA1tvnz/7dv+GBxz/P6XMXuPOB+7jroQdEk2M0Ym9vn+UrV/jg7Te5uXyF/f1t0lTgNINeX6oSSYtOpyu45rJg0O/R7/cwldA0e2pYtKPCHKOzNMaQZznZKHOsEMKdHYXCphbHMkimgtEok9eiuHbc5KH7rLQ3WE1mpM6Sjm0u7XYHpQSbn6YpU9OzQsVcFORFXquG++ZTi/+8TKQwbNiNfJavuQb/xdpl3axQy6J4+IknUFSia6AtPh/jnVRU4/COO7f+e31AEgSaM6dPkASKURAiSLBA7twa6Q2y1Jtrrz+kMZ3UGZQ4TWi32x9XMPB37QyxrxxAXkgFQummN8kYQ1mV7O9tEcUpRd4jUJqyM4l57LOUzz3L/MY6YdphYnKOXSN9CYG2jIoRE90266u3yIYjtra2SOOw3vRNVXD1vdcxRckDD36GDz78oM4qYUvWV6+zv7vFHXfey5Hjp5memePatSu88NyzPPrkU4RRyN7uDtmwj8JS5LlkQ9w9aad9UhQ5k1Ndbt66iVIBnU5HqAv7AwdFEwVgYwyj0YDcbaiV68NStT0ez4k0x8dDGg+//lMBDLbBdnP4danGBE4gS6BYgavOCNtMQBxpJiY6TLn+GEkcSJ4tiiOKvCDUTeBTVcJWVkMorUWZijiM6w2jKivHaNjAAoxjIVOhIRuNUFqTxCF5XjIaFXTabdfQrSkRjH5VFBBq4jAgClrOYTBOeLRNlueYqiJqtbAGWq2UhcV5ZufmmZyeJm2lQrGMw567jJUkKxwq3TQZUA+T9QvKWiNBWyjVJltUrGxucWv5FrdWV+kfHNR49DCMwDaMbd5ZDTzLmW16gKgheGIztHtPrZGjwGrtkhDmEMnCL46/+ZiamqTVTpidmWFmbpZOt1M3TWPFuVJaoYIAZUXDwmDZ29+nqipOHD8q8GM0V26s8tKrb7N8Y5UjRxYIXVA9Mz3D1Mws7VYLi6U/GNIfjkjSFkcW5+lOTJB2uszMzaJUQK93wPr6JtmwT6/Xl33WIJTRGidWZyQrXpsA54yMVWSFbEecliZ5Zmm3E6I4wBhFkVWUpWIwqugNKvb2N1jf2qfdukUQiBBu6Igrjh4/ShBHRFFMEkckoWbYH3LnXXewt7HD3v4+SwtzbGyssr5yg+3NTZ7Z3GBnbYNiZxc1yFGF4WA0YnU4YK00jMqCe+48g9KKiXYHgyKMIrwzBtTrTHTcxpM3mttNoK8eGFOiHLEIrgLgnT2PnvHOU52UqAMRF0i5IEgc2qaK4islClMnEWVsZf1b1wfoHkQTCFlnM6wLG5TfK5SjKPaute9blev2xBW1rp/yPYW2vh7jEpDS/O4YwBzc2euIeciaVU0VojIGpYPb/BklQRNgjOwLxo2HUqqGyimc72cb7lNf9cAK2A2Lgx9px9cqlXYveC7T1fc6gVQiPS2gr2mF9fUeTgI3c72p2IyRJiHrpHZGXMXFKhhPJHv9H18lkH05qgPk8Sqg0k7XUGn6wyFZnjMa5QxHBWube2CvolEk7YR2p82Z06exaHa2dojjNkeXjpANeiRJxO7eLkVRcM+d57BVzvbWLq8//z6rt25ysLaG2TkgH2XsDIfcHOWMjEUpw/HTZ7jz3vtZPHocrRTddlsCQ2Nr3wMcQscjE9xj9YKrdfrDO0UIREz59Vavp2Yu+7knsibUQbcEO9JflaYtNtZWGe1vEGhHPe0q4aH2PWgWtKp9Ysa+N1CBY5mthDU0UHTiBFt+chTCpwpsllc2sGhyV+6WHjFFWVQuuqtjRoJAU2UHvPrMt3n1xwmtzhSLx45x/MxZjp86zdGTxzhy7Aj3PfQQOxtrXL/6EVcvv8/1qx86kcTcVQdKRv0hZVkRhjELC0fqiDwvcgaDAcPhEByhgaf3C4OgHniLchG51xFp2Ks8R74XCPX/9VAbCT5k0AvXmH176VcpXF9QXEPQ8jxne2vbieyVtQPbNPzjKJ5l4lSmoijyevLYZh3KPRyq5shyD4KAM+fu5djJYwQKkigkUA4iONZrc7tzW8PRfPZIqdpwPvL0l/itDy7xZz94gdbcEjqUICYvcpdOEZ2E0WjIzs5+bcyazQHSdoe0FSHaCmOGZ+zn26MeoRJXY89F/pnKMBr0AcVgOGJ6YgJjYG9ihjceehxe/CHh7i7ZmYvSw28rklSzudnj+JGzXLn8Bnk+RKuqVmP22F5Tldz46F1MCV/7td/hO//xDzjY25FsldLkWZ933n6NuflFzp47z9KRY1SV4f2336M1tcD6+iqjYR+tNVkm4n3WmprgosgLJqcm2d7ewiIwRaWgLDOwlXDvj0au0jgOK1NjhuRwcOLnx/iYflzlpXn2TRbPZwIVDeQMqLP/PqgJA8dmhmg6tVuJ9MdMdpie7Eg52TvVLtXnaY+jKCJUQnOLEnYU39AbBE5MMwgpSmnmDEKhcMUKiYLAOF3TIqa+5jCKUFahXBAQ6MjRVsZgLHEYUGqwVrKs1ukeREFElgvDTiuOmZ2bkf6YpQW6MzNC4e035LG1idusjasIG896pHBrvNENETshZAcHvSFb2zusrW+yub7B/t7emAq8dzylGuMTYT6QF0dHaHmNz5r5cfZ2wQg1alGWknnVmsBRvdabUV0V/sXxSY4zp09y8tQJgSg6m+OftSJwa1pw7YPBkP3hyGkOWeZnZ5pEk4Xnf/IGa2vb7Gzvij1zQtBlmbNy6wbWGEfbnhBFKWmk2dlc5+aNawIvxUoyLMvY2z+grArRxLGWKNaEVpjXfLJtXMC6gTc7+6Gk8RYtzrHMVXk9DBvGwoODfaIo9JudkKZYJUnKSlNYiypzuhNdjh4/ydz8DGdOnmT/YI/1tTVGwwFBoLh1c40szwn0aTY2NlldX+fGjZu8++bbqN6IkzpmJk1IkohBXmBLK5oWruIyOzNLXpUEOnYJCUf/6xaaNYbABXJNOpH6veN/re2n8o6Zg7BZnIgt1E4dHjYmyYTKiiiqc+vlObrzoASyjfWt9zSEE4BFU7lss7UOQurmh3e4m9Z9X5GSAK4yXrcHp7dnG4JDNx+boEO2Y2
WtSw7Ja9aI3bbGOkp4UNbvE95xpE5E+3HwwQn4JFqArXDzpqpZGMWvlcGt8OQOQtqgrHV9+I762u89fi5isZXckEICJj93y7LEOj0z5fsuvJ9QC6Z75IgnGYBDQtG+KlOPk3siSvqe/F5qZUnI07AWlO+B8vNJnktVlVgTNP2Zfq/1+69WDAZDFBpjFJVVjXdhFUW/YlRmnCThzLk7qYqMjbUVtudnWVm9xYkTx+j3evT6A7rdLpcvX2J9bZ3XX3+T0f6ASaNYjBK0ElKgYW4olezRWWnpTs4QhhHWlLL/unlvXKDajJ2u5xDNcMmTHvM56vXh3tT4oC4QVz454hjRfFOotbW/WRnD3XfcCaYiy2R+jFROEIZS5XN04VEsCfMwkoC6lQoltPihlUt2FlSuzxwbYvjfqMfmYJgTJ4mbCFBTvRlNaaoaC+srG6YyaG3BjBgd5Fx7f50P3nmFMO0yM3+Us3fcyamzZzl+9iwnzp/nsae/wN7ODus3V1i++hFXP7rEysoNer09qjKX8zuubN/o1ml3mJmZRQWCzy+LgizPRKMjL2pSAB+wRFFQZzc804V1/ORVVTVCnc7Y+HUpWRlb65CMBxrjELRmkzGYKiN3PRFyCo+HlYniv6dx8j8mu34oO9H8HcT4zS4sinouhjAKiZQ07nk86WEn97CD7G6LcYz2CMVv/pf/DRcf+Qzf+fZ3eOfGCnpimk5nkjAWZ6wqDaPRiMmpKQb9/limRE44PTNPEkZQN7kfzo583M+lE3TCjgdeztErC4a9AcWwZG5a+h9MGLI5t8DLDz/O8I3XOJicIisGTHZb7O7uuL6qit2dbYwxtDtdJqemmJqaZG9vT7QDtMKaguVrb5OPDvjK177FX33nD9nb3UEF8lytNWysLbO3t8W99z7Eh1eucMddbZJJw8H+HkWek7aSWl1X5oMLiq2h3+u5CldElmUcjDLpJ3K499uP25//xx2H5sBYgNJgzGX0XIKJcXpwr5juf651ZAL5bxJqOq2EiU6buZlJwiBgZnqSylcoQERjXeMszlAphROLLQiCkLKsqGxBHMUO4SENsEEk2RkfSFVlJQ2mBnTgekpcL4ImBCOBl2RZHa2rM8BhEOEZiqyFNI7QSkkfoLJ0W9IfMzc/z8zcHJOzMwRh4jZZEWM0frwUKKvr8Wwgp7b293DOgXHY0KKw9AcjVtc22NzYYvnmLXp7PUwpWjRhEBCLFacwQhMvG7IhCGLX/C2VoZpR0Y2zh/v6jcPSbK5oyWJ5nojS0U6P25+Pm1u/OD7+sGXF9uYmoyyXyqKWXix8UgqhU79y5RpJFDG/sCBVG1OxvbrixCMThkXOzETCi89/SJlX0qcZaIbDAVk+wFrX3A+AYyqzJUrJcx+OMgnqUWOOMXW1LghdgqLuUXSZ6DpT1jB7et8lCJ0AobUcxkTJecqyYpSXJK02SmlJACkATRy3mJub5cTx49xz1wXmFxeJkpiXX3qBvcl9bty4wT133cHU1DQTE12OHYOtrW3eeedtzp8/T3+Uc+/FeTa3egxuraNHQxQWUxSkQYhWjkAFw9REl6PHjolTqBSVrWoH0yf2LNQsj4eso/NPjXVQTNXsrQrqvpUmQBEbIo3gklys+0BcQsG4RvOGSl6AwNopxnv/wNsHHFWuvzJfFbdIAKCt9CL7xIMYLeUCEF9RUDXjY+UelwJMXY2wGOP1VeTWPRU8NFUoz5rWMCM2wZAHzGEVKqDpl8HbQeX6KhzpkoKi9HuLT/a66rHTr/GoDp98aWBmY44vsg/5EfN1GO3Gw3pxUHC9T/6qGqhTHeCosTnv18LHbJceyli/1Y+jH39/1S5ormFoIAyZYShrzCeUJOKU5JZreyiMJWq1yattICAMQzqTU0xPTXL69CnOnz/HmTOn2NvZIhv0ufrRFQ5OHWdmZoog0BxZWmR7Z4eNzS2uXV/m2ImT3PeA5t033yPsD2VcbMOyiQKjNCoUGJw1RsbQM9i6e9QaF3i4LID30eokwLgPASjvl9qx+eX9MB+Oq3od1fPIPz8rm/Ddd9/J73zr66SRIIcE3SA9z0kc0mxLApk2ThsSP2ccJFA0mySJLjBPYZ/8pMenCmyMFTYPv8F6LYZizFj7aD0IdL3Zah8uKgi1xWY91q+9z/r1D3ghjJiYnuXoyTOcPneOpaPHuPfoEe577CHyUcbe7i4bKze58dGHXLvyIau3VtjZ3aQoMrSSbEFZlJi8kmvDEicJaZI6MgHZgLJsRJZlFHlRqxqHYSCOl89wqiYbUTs1tSCmGMRSlc45kaFTSjE5OdlEt2MORWWkOuQPYdhxXOPWZ9D8uMl/xzG9zfd/vJNrrKLf75HnOYE1VFWbJI0IxuBr4wHN4cZyxgwQ9fsBcgvnHn2Cf/LgQ7z94nP84Ps/YnVQoOKY6ekpOu0OpizZ315nefk6Ji/HTqCYn18kDjT8nAh7fLFa79yNj6HFwbU0K8vLzM7PyeuBw/rGlq2lY6zeZdgvKjoTbQaDAYP+UHCZxYCyEGdyYmqaYVZy9NhRsnzEaDRyrGtisDbWrvHW64pf+uqv8cz3v83e7jZRHGEqQ1GWjIY9rl65xLFjx+h2UjY21+gf7BKFAUWRH3rO7XZb2L6Kkv3eAcPB8FAg0wQijZH5JAGNf/1QwOr+x1MtKh/QICXzRnAyqCtJWisCrQi1YPBbacxEt830ZIeJTotWK8VnbKIwZJTnbi1JtjIMQ6m2BJo8LwRSpUSENs9LkjQiywriIK4pc3UYom1QO/DWQlnKhhzFMaUj9ojiFqFLQhSZUK5HcSDQPCV9Ljp0uG60C3QgCTWtSHP8yBGmZ+eZm5+nOzlJlCQugyU0qWXpG3ptvZtbJ37pgztfMQ1qmJdymTDLQe+AnZ1dNjY2Wbm1SZaNwD3b1EJrquOcLGFAVK2YyojAbxmH5I490SdIAiVMVGVZSdWpKMmrUjJxLhvrGyl9P6GukIrO2DzwNsz/XlafPLv1v/djb3+frMykWtiXRu8oCmVDNpYwCtAoTh0/BuASA85JtOLkZEbYQe88sUDrV77AH/7Jd7h2c5lsNKghvOKTObapCqSibaQnoaoIwDWjq5qQxLuAWG8rPJMV+JPKPmidQ+vsh/NdirKsM+14Z0VRC2FnRcnU1BRWRXzhi19gcWGWhfkZ5memmJzsEAaKsqrIi4ph74BsqLlw9ix7ezscP3aUxYVFNtbX2dvcZlhktFoiTHvzxnVMZXjo0YdZPHaMlRs3aSnoBBrTL9hY32Lng/fZWVun1+8xMTeLjkTjzmIZDQfOF/f/Jzelg4b85vbgfXzv9rAhWYyiQWIDVa99izhMfn/14ylmQQJNHLWy6Lj4yo2qs9rKkYZoZC8NHLTKKKFiFo9a1461RypoHdbVEeVJeGpb5BN69afEkXfnCVzQIr0PDfyrdtYdY1pNODReZRirNjTncD2Bfs4ggYpsKp6lzIwRV1iXVW8CpfrasbW2jR9RT7NdV1vc+Nfh31jQo3wPaeOj0wTuY8/aZ7J+6mjuTUqVDb2/P7HfL6mfX
JNccjMAhbQT5EVOqmMJ3rSrflqpiA2GQ8qypDM5zT33PcDZ8/dy8uRJ5manaCUJQSBrNcszdjZuglYMshF33XsfC0tH2dlaoyhy9ouCIBJh8InJKU6eOseXv/zLvHLn6/T2+yxMTBDpkLWtTeavXeXm6io7u7vMzEwTRZ7NF4pcpCaMlbkryUSNtWO+ubcXge8DbXwt2d/M2LOT596sJz/kPjj05b7GH5mbmebv/d63ODLXRVG6oL15OmEQur2RQyKpSqmaka4qC4yxRFFIlmekSUqnLX6F+dhn/vHHpwpspHG3EoiYMaAsldPf8Ie1wmAmOjPy96Iq3M1rp7NgUFTyAIqc/Y0+++vLvP3SD4nSDlNzCxw9foKTZwT+c+/Dj3LxkccoipzBXo/1tVVWrl9n5cY1lm9cY339FoPhAUpV+HKZMZY8KyiKPl48K4piWp0uURSKsKGBPM8cy1Qu7ESlqM/6UmfT46IIw4Q4Tmp4ivCABxRlgVKH4R+iWp/XD72hev7ZEJGm92W8F6YZW+90yTgLlvL6Rx/w42ee5+nPP8bm1i7tNKHb7dBupdLP48qQkhU67Dgfrug0e6J8gcJGbR784td58Mkv8O4rL/LDHz3HoCqYnZlEA48+8gjra6tkpUBttKPBPH32DEqZn9p4Pu7w7/AGzlNcymtSmo/iiFs3PuT0uXMc9A9otaQXI4wkwGzNLTDaPaAsM7a3dwBLt9thd2cDnLbB3PQ0yzdXCCg4duwY165dO3T/qIq11SsYCh5/6ov85PlnaKVt8jxna3udxaWjdLtTdCa6TMzNc/XNt8jzIWmasLG5UavYd7tdyrJkdXX1Y53Lw4GLqhmYfvZ8OBzwjFezJMPqnp2rEonmUUNDWv9TiiQKiAPN5ESL2ekJJtttkiQmSRNK38+CMCspLdoMCmilCWEYOsMkazuOIuIkJo5ix/NvKcoCr0WTJomjtS1QWgujmdOeKnJhY4mTpMZgh2EokCpjnKhlVa+7oijqNQQioBiHAdNTE0zPTjE7P8vC0hFanQ4EAaaydfWicLAxqhJjFUEY1esoODTWrvKlpFKHqqiMpd8fsraxycbqKrvb24wGQ1RliONIKFCVkCpUZYUOJdAKQk1RQRTE4lgGAXEYoNsi1mmxIjKoJGOrg5B+byhPNYqolCRFslHp+jlkRw20cps6zuGy9bg0bI+yeoQ57hfHJzkOerJ3aB2K4nUQMCpKDCLKWZUS0Bd5Ke8x0swcBiEWSah5XHtpKuanWvynv/NNXn/zXZ578RWuX7/Z2ELHLqQdna5U7YyjopUgxRiPfBhPNzPmzDUwKLG73jkZq2Q45y2o92Z1KIObD4Wa/vSpE5hKMRwMOH9ykdGgTzk6oMi22VndgDAkTrvs94a8884llpeXSVsJZ06f4td/89foH/RYX9/gYHcXHQX8xn/ye5TljymyXc7MzGCLjDOnlnj04XuZm53j9KkTtNpdBv2Cmysb3Lh2kytXPiROSmZnpuj1++zviZh1nRX2CSDlIeUug6zEzVLai3b7MRPnNI5jrKkYVqWIN3p3WvtsvQuQGHPEnT1ypQQaOKyqM991YkT5zLUBAlAWq5yWDtJzaNSY/ol/dnas56kOXnygEIDTq7NUjFfZ5J0uQDH1b1DDDJuOEuUDGX9bqtljrQ80GNe2AV+OsMpDjcb7soI6W48bryYp6+/JBQZ1rKPc5ZtDBAc+xq51+VQTUPgw1mstKXeNHvmEp+72q8A2MVv9PKwkhOvuKiVom9sKFWP+yXiwKn/RSuibGxdmfCFqdnZ2efSR+5mamGB+boZ82CMf9BgNt+n1jUMyBHx05QqXPvwIHcUsLZ3gM09+ljRJ2d7d5cWXX+LChQv85m99i92d73HnhQsszk4y7PV54vHHsGFAEsdcOHeBbrfLaDRkc2ODtbVV9vb3mZjokI1GUOYMej1JWipNKZzyTi5B46m7rbGYyoLTjz48ID4pUE8Gt78Ezlev41f/lNFupgcapqe7/P3/9Lc4eXSKIh/hKbBDp/VWVRV5VTb+tGeftNS+k1IKHUUib6A1nXaHMAxcL5mq4Z6f5PhUu98oz0nTpCHxtU3TlTjaAeNiWVUlE9fDwbS2jh7X1hdr3TD5bGM12mf75gGbNz7ktR//NUHcpjM9zcKJUxw/dYbjx49z5s47uPP++8BYsuGI3t4OW+sr3Fpe5uaNa6zcusnO1gZ7e7sywRXNkjEVg0GOPejLBoWUvTrdCWbnBL/sEyC4rGuZFZRV4RSdIY0iWmnKcDhk0O8Tx6KZIQGROJJhIIKNPivfOLTqtskjPx+Gv3iH1taBmj8O0fEqYeX6l//if+S9d3+Jz33hac6fP0U/K4gjTRqFJK2UVprSimPHYuK+ezy7X3+Xu0Kl0I7T3FhQSZuLT3+Jex9/kg/eepNXX32VYaGYmppmqtNla6zvaHZunscfvg/sz6/WeHiBN5XGGGlqrA2yMI2pUpPtb6N0wO7WPt3uJFChDKg4YHpqiq2tXbZ2tigLQ6edMjk5wQfvv4nPZKbtCfZ23kFRMTU1QRTFdZVFOafEYtlYXcYayxe+8nUuvfsGN2+sEMUxx0+eptudZnpuka3NPfr7e46u0jAY9OtnMhgMDlVm/iYo4fhxO0xwvDJT/0PVPS11j4uS+d0EMg31chQq0iik00qZnZliotum1YpdhlHGVxjOFFpHdVCRhCJY6DcJHehD1QBPqz7oD4iikNKpG/s+m6IoXD9RQBQFdU+K1gJ9CcJQREmrklYrpaqE7Ux6SayjU4bQWmwlqs9xFDG7MM3xkydYWDxC2mkRJQnK6YxUpsI4Q+kDqNrhcdlO7TLeZen76uR1gYxo8jxnZ2uP7a1d9nf32NzYoMiEyCHSiigI0M5gS/N/CMZiCsdYZy3YElMGziAbWomsO9HkUhAGoC2B9pXtAK1DJtotRlnmGIkUeV5QpaKB4dmu8qIU2ntrpYk8DCmL0iVdhCnLN4Zm5pOLmf3v/SiKguEwI4mdPEBV1vsaygchPmMt1Tffm6mQvgKpxhv3fKTB/6EH7ubOC2d5+91L/Mdvf5cyL+tgxLnmTkcG17fhvlMI7qQ3QikHfbfY2iPB+WKNQy/pEeszHnVGNc9y4iS+zQ4pslHB/t4uUZRSFdLLOhz0qIocqgKrA6fdEnFj+SbvvPsBB70eN27cYHFhgc/83me48uEVbly9SgmcPHMfVz/6iGd/8D2OHj/Dn/7J82xvrrO2ts6xI/M8/ND9fO7zn+f4whRlGFBZiw4M7W7EiVNLjIYHFHlFEITsH/QYDkfSr1H3Hrq9y3gneqxa6Rw23+Tsqx5uE5dPqtsZtdTH/OR3ZU8pDQ3cL6jPLcFWUH9SWZ8Nl5+1cuQj3lOyWvY15xn6gKDpHbHNNY85lYcuzHncUikxjY/iZ5KvYjj/u4aL1ecf/7yqA2zPvIoVR1NO6YM1N1fra3G0RNLIg63fN3at1sGSsHXvkfXj7wLF+jrc78YF3HUlUomI5qGY3n2HtQZME5j5eWExKHt4PYz3TflKkhcdFwfb
ex8GYz2FuzxLb2OFQIA6yBN7AKsrqwx6A4wpqcqMPBs5jTHRpeoNhrzw0iu8e+kyeWE4cvQkjz1xjrOnlvjr73+f2aUlVNhic3OLP/j3f8ATjz/O//L//v/w737/32GNZnFxnosP3M9nnnqK3sKi+LCBotNtMV/NEsche/v7IuxtKm6triFQP0e8hEJbewj66q2Ivwc/uL4i4/taD889SbhYY7DaB35qzO+A0yeP8rd++xvcef4Yw0GfykjSx5QVhZKKcVEIqqHu5XX6d0VVolC1UKeIYge1YHZeFFJEsfZToRA+HRTNlJRl0LD2AF4MrywrvN5Mg+FzC1+pWjBz/BDb60q1vvTujJEve+XDPbLBLqvXPuK1HwFK0+pOMTM/z/FTZzh19ixHjx3j/H0PcPfDj6GsCCWOen12NjfZ3FhlfU1EJzfXl9nc2qTXO6DIs7EsmaYaZQwy18eDAet4toOAKA5JtGhueEHA3sGBMHAFAVmWkaYp8/NLZKOcUTaSjCxIiY3D8b6MiVNHr1MR1FkbH2GMV4uagEbXaujShJqCynj9ue/y9ks/Zn7hCGfuvJd7HriHOy6cZ35+miQeEAUCO0pbLVpJIn0f2tbZDH9xCl/laZ4dQGWAMOXCI09w50OPsHrlI9554w3uOLKE3d8nTjocPXmCv/tf/ZcsLU4zGo6knO43n9uqNw3cTOaKnzs6bDYrXNOjJza4dv0K05MTQmvroI6Bkmx5vz9gNMyZnJpkNBqx1O6yu7tb30NRyHOxVc5oNKQYjUg0GO2a7vCNf5btjZu89gpcfPQx9vYPmAmmOeiPmJ1r052cZfnd18n6+0RhwEFvvw5cfxac7HYY4Mf99/affYZiPIBRujEo2jmzQhVsiQNNHAV0Wy1mprt02ilpGtNupTJOjho4SROCmrFM19niKIqIIyG3rCrryDfku4uikKb/MHSwToGmRVGEMaaGTQG1bZBkRoAXfovjyAUUiiSJqKqAbCTV0iiS4KoqDUkYQlkQhQEzc9PMz82xePQoM/PzhGkLi6cXleSItr5/qAnOpSrjG+olG9Q0/ErV2CrFMM/Z2t5le3ODra0d9nZ2KfMcbY0EPcaQxjGF8ZnNwMHkAvr9/pgdtG4sKqfMramqkiSW4LkqZaxjpzxvDbTiVBpmrYVKqgXtuFGdb8cJeSHV48oaTKjpJKL8XZYleVlSFCVBp+0IGnBMeorSQlE2/W2/OP7mYzgcoVBkWeEgaN6pE3tQFTk6kN4bT7Ah4s8WrKGsmg0ZYJQVBGFI6UQS773rHN3ur/O97z3DxsaW2ILAsYj6xJeWZvHA9V0AVC754Jm/ageaZsvABevK/+gPJQk07ViFytI42l2DtZqysqyurjA/fwRxVHRDvkNKmWd8dO0q71/6gN7BkDCKiKOIM+dO83t/+/fY3N7m+R88w/nzp7jj7ofpFwGLR0/zg+//Fb/9t48xOTGHtQHvvH+FwML3lr/Pi3/+Ax57+nG+/tu/zoWLD9JuJ7TSmN6BpXLitlVZcf3q9Vp7x1cEJKihXjPeEbMeZtp4wHitHp+5H/O668BS1q3flWWvMbWzbF3QUae2nePbsDfV53NwNl8R8A61O2sdkGBBCmvecVTgGtr93iB3a+okm5zEp/38s3cVIOtZzvwcMa6y4fpEagh9E8T4PDtUYxVB8YFw+4AEQ54AQT5njHeQtZuOFipbV4kO+2z+euSZlT5wsTIWvum8Ccrk/oxrSPfJNHfT7ne5B0EruPtTQkbjE+rKysjUqD5AKUcEopSLT924Ww+t8skLYeFsxleecRzHY8upjiQxFlZurVBmuTTBa02cpBCG9PdL3n3/fZ576VW2dg5AaSYm53josc/yhaef4Lt/+m+5urrFnQ88zLm8Yn4i4Tvf/R5hlPDwY48zyireee0t1vYO6F+/xVs/fJazF+/ll7/xDS4+dJEwjGT/MhbjmEOz4YCVjc3af9Rj89rYyrmYeqxS6edfYzOa/zr/3Y3XWEqhhlyKD6KYmZ7i6Scf5Zc+/wSdduh6S905jPgQldOe9HDSyjOIGmGMDSK5l8plDDxCyRhDXkgCoyyruhjySY9PqWOj64ysGHGZoD4aqzF6brP3JfWfnvjUv/ulKU2B1LCKovBBkB5T0RUmjVHvgK2V67z76ksoFaDChG53gvkjR1g6dpKTZ09z/NhRZo8d48SddxC7bGaRDxn0Buzt7XKwtc7Gxgprt9ZYX1tjc2Od/d1den3RzTFUlDmgSvdAVWMfHfFAWYqGhNbycHa2t7EI5K3bmRBYUuXYi+r6qUycuqFxTE8Emq/wZXjlekCaYLGp2FRlRZZnjlVJk9Pjem+TG1ff5UffS5icnufk2fPcede93HXvHZw4dZTpyS5hoIiCkDiOSdOEVishjsIaeiSGoyk6++clt24xBCyev5Njd9zJ5776Vfr7u0StLkm7RVZWbG1vi+PpPv2zIGlNFslpnChV22Nbj5MhK3JQiivvv8kvf/O32dy8yeLRJWni1pp2t02RD0lbCVprRqMhZTkiz0eAc9K10AGXRU4rTflv/sk/5jP3XOBP/uiPeObFV9joD7DeUVWwtb7C2z95hQcefZK1mzfRWBaWjrCzvcne9gamKtBxwsHBwaFgxN/XeKDieyX8700Fxi1kZ8C9g67GG/uVInRaSILjFrhKFCg6SUK7kzDZadFptUUgLApIWglhoB01rOg5WUudfQqDsHbElA7pxLHMeSOwGpSiKExNQ+wD+MLBTuM4ris4xhinLSX2IQwD1z9imkqtilwCpIFoRlFMd6KD0lBlOZ12yuLxORaXlpiZm6c7OUXUalGB06iBsnJJB581pGmYV0phtTACah3UAUKgPOBMM8pztnd3WVlZY3t7m37vgCrP0E44zGYZ0dj9ZVXJaCRVV62loiP01Q3lrFC0+3I6JEnMcDAiSSJGw4FLjAipgU/uSKAZonVIWeYu2++YJKuGrl6gTpY0SqVC5GC8hRKYBKlyDe4CBa1MRVEaisox7/3i+ERHEiVEcSi0xmFIXhTCWuez63GIsb5iisA5VENyErrEASCwS4drj5OWBBfAPRfOcubkCa5eu8Hb777L2vo6/V6PLMuoyorKzeFKVajSOjiJ9229c9jQyVvnJIu99l6JtznUiapslAvLIRarrat4iLP40QfXeOj+R2UOKUOQpAwGI9599z1+/NzzrG9scPTYEkksfQY6DPj13/h1dg72ePH5F9nvHXD5/cv8w//6/0QQT/Df/3f/HZffu8Sf/MEf8ORTX2TnYMjdDz7Ov/wX/zPzEeyub/H9//AdXnjuJR770md5+itfZmb+qDC0IXO/MoZLly47X10SGAqnI+JQIO5JNAHCmJNqmlJGnbRBgSktlqp27oVtrHGqD2cdrROfLMfOPZas9V/rKgF1FcdSi05K1t8IyYctJeBQzma5xExTFfJf7n0M639rfCblKxPeyTb1vlJrkCia5GjjQdT3UwtpuqSPTE6XybeeUtohN5Q4w1orojCSZKKWfhnrLkzeq+uqSJOXdX1JGsR
fkfv0fpSxTWISfz9+UitbB6ZaSS+oVLzk/rTyVSPrxtjHKpVjRZW+ImPEJhpVNqM5Hv365wAoB0pueksUlYWsKER0OpCATuiupRK1t7fD2uYmx44fQYcR2mquL9/guWef4/JHNzjo5ygdkLZbPPT4Y3zzV7/EWy/9iFffeJuDkcCif+93fpv/4b//P/PhR1cZlZZf/9a3eOorv8SD993PX/7rf4caZmRFyUs/+CEvvfIq9z/yCF/68hc4c/pULfaONWysrzmNxMan8nuSxOiNv1ELwxoj5RZ8QNHAPGs4oHXjrQVSGChNu9Pi5ImjPHDxbh564F7mZ7soW2FNiVWB2AmlKasCayWwUUo7+maZY4GSyppWQijkk5QesllWbj83Qq2v3Nz1xYJPcnzKHpvKbcj6YwIZoXrzEAyZpw1Mzf/zbC9eqwKoGcmMtVRGxMjKUv7ms74S3FinGVE/D5nEWZ+tgy02bl7hnZ88L05XGBKmbabn55hfPMrJEyc4deoki0ePMzs/z9FTZ7g/DtFKlLzLbMSo12dwsMvO1gZbG+tsbW6yvrbOzvYWvf09Dvo9smxEZQ1RWDRECsCo32O/v+NoWP29NmJ63mFWY4av8eJdIKGajFJzND/XC8/tXLWBd5kjhXbGB1QwYnu9R9bf4cq7b/CdP43pTM1y8vQ57rjrPHfcdSfHjx9hcqJFGMjijeOENIlppQlpHNXN5j4jhN8MlDzz0lpUq0scxORFwf5eT5o+x9IAPwt61dydvDeKozraV8qr/cpG7nsrBnvb7GxskOc588ZpdTinotWOKUzA/kGfVhqzsnzVMb54rHVBlg1ot1qcPHmCv/t/+HscWTrKE7/yayx/eJnv/cW3+bNv/yXvf3QFSRQYdtZucv1ym3Z3kgcfucBw1GJz8xK2ygnDgIOD/ZpAY/y4PdA5XHEbg5U5Q+2DXk9THjjGNl+ZicKAMFC04phuK2Fudoput00YSjk3jUPKQqoMKBGDjbwh0brujYnjuM52iqqyZjRyjlVlqMoSY6DT6UilwZWxI6dBI/BRXHVW1YFLLdgJKK2IHGHHeMWmMiVRFFJVuoaslUXB5HSXE3dc4PiJk0zNzBF4TRcsReUdFFVvtuOhv8+Qeqy69sFhGFIUIhq8s7PH1sYmGxvr5IMhpvBUzQJx81ml0WBAGAkcr9frkSSJvFZKUGWMVEO8AU5bCVhp0q/KkjCSAKaqqrqSFdUsbcWhOQBIlqoyaC1CeVC5JIl2VbBQnFulUJUhCSPyUs4Tx3GdXFIKcmOEgKGqSCJZNUn4N6+7XxzNEUaCZfeCinEU11pmFkscJ1grlcyqqkArkkgoz63RVJWj3EX6oOK4TVVJ9dJUYqejMKLThqX5WR57+H4GoxGj4Yjd3V1W19fZ2NxhbX2d3Z1dtnZ26fWH0jBbWYzVVFiM2zz8vAduE2+uE7H1UZY5YdihKMvagZGEqeXG8i0GwwGttENVwPZuj7/8zl/xzttvSVBQGra2djh29Ag6VPzyN36Zoix45513OHXmFC/cuE7W7zEajXj7tfe4fvUjNrZ3eOe9d3j0iSdQ2vAbv/4r5EPDf/gX/xNdaxmNcnrX1/iT3/9TfvSj53j6y1/g7nsfJIhSLLCzs8ONm8vSYOxZxKDZOOtfVf13STw0iUdLhULWWxiFRGEkTIoaJz6tUTXUukmugQ8SG1h4LRjoYxEcLAefrHJBkP+sEnuFqwRU1hK4HDr1fiAJLQ/3kgqdh81Kj45AZEFrgTuWZYXvqw11WGvzSWzXBBVY7a7ZJSIreR1kjEwl3yq6M40z6YOzmnBsPHih/oP/jTqQ8j/78VG+whbIu62jBPC5KF8Bq8aqY3XQIfTYQnntgixcAKgceYOPIOWO5B79+5rJMjYPxt7rgkZPC63AQbtVPbVq9mgXNAtM3Qey436t5p133+PEsWMUVcXe7g7/9g/+mIP9AaOicklaxX0PPsS3fv0bbC1fZXNrm939Hnv9nBvXVtk5v8GVq1fp9Yesr23x4x89x+c+/zkefuhhTiwu8W/+X/8TB8MBe6VhlO+y/cyPeefd9/jyL32BCxfO053oYo3hnfcvOaa+ZiYrhCUY72sw9nzGb9Hg/EfPEixry5NUaBStVpsTx4/yxCMPcPGe88xOdUiiUJI8DipqlXYVbOrkqXVzWwWWMIiwtnRMcuKrxIn0oCogijS5Qx5IMgBXJXXtHYEm+N+qx8ZvzOMBzSEYjssgVPWC8wxgDS5WBlAyl1Ul1RnB0Mlgy99LydCWFUpVdRBlLWMlam9snAFyWTRv70yeM8oGrOxtcuuD93jdNdsRRiRpm4npORaPLnLixBlOnjrF8eNHWTq6xNFz57hw370kcSRwlEr4tMtRxnAwoH9wQK93wObWNlubW/T3e+zu7LC5ucHmzjYH+/vsHezQ6+3RH/Soyqaa5aNhf6/jfpryjZ1uatbZQrwzZOvmMP+ZQIcEYUgcJxJAebIApeqTR1HkIGEGsj1uXH6T6++/zV+HCd3JSY6eOMa5O87y0EMPMD07Q5JEjtEOAhVKJScKCKNAJpaVDFdVlo5SuZRyrzPSeszY3d5fcrjXqNmctFJEcYq1hqIYico65Vh85LNbijd+8jyf++XfYOXWNU6cPlmPU7vd4spVEcFcOn6EjdWbdWN9mrZdL0JAHCU89fRTxK0Oa9u7mNIQTC3wS7/9ezz51V/h/Tde43vf+S4v/ORV9vsDhv09ssEAbR9jdeUKOxtrHOzv0u206fV6h5xVf08fCyvTTQnXV2YCFQhtcuBfF0HMJNIkYUinndBtpUxPd2h3O0R1/5YIWGktzeooTRo5lXELOo4oi5LJbkJe5Cgl0KTAVT6NqYiiEKVcn4m1ApvWYd1sPK5Lk+cZxljiOKyhpp4IJI7DOpup3I4ozmAsUCkH3fIMLkkSNLCdICDPLO+98wEfXrqGCgLanTaT3UmmZqZod7p0pyboTnSJwsBVYmxtgL03pxDmpr39Hjsb22xt7bC1tU02OKAyliSMpOpjyuYZaI31GUyl0EHgGPisC/okaPHVFD9uQRAQhi4Li8basg5GyrJ0VWVT921VSKUNqyjyss6yGuXpxG09N3yQ7NdNFIagFEWWNc/bCizEX5dAAm0NtZPnlWOjX5AHfOLDb+I6EJgm1BliUxmqShxHYyqpAGqoqMQeak2oIAoDKltJKsZoVGAcJMmggxDj4D+2FLhikoQEYYtut8XJk0flu1ySa3trl82tXTa2ttnc3GR1dZPNnX22d3cZjTLyvKR0WnJy6U2mXeyJrZn0jDXkuQTBlXfSEST+fm/A+5cv8cD9D6KTlJPnljh75zqXPryCLXN0GDIaZRRlybd+41uEgeajSx/y9Oc+zzPf/T5LC/NEodCrf3DpA5JESAWmZyb5ySs/4Wtf+xq3rn3Af/73f5fdjTV+9K//vyLXoCyDUc7B8jp3bu6RXLvB7MwsSRTz+mtvkg1zqrKsK5Hea/ZVYuUdVHVbxrnyRArOebNi78piDAKvmuScMHY5pxuDUQrtqY4RHZB6D2
o+Xh8egjb2lzpx27zTUrj/NggNDkWgjcPu9zp/z/65qvo1H1p4OJr81vSHgNffkV4XVSeB/XW6Hlbre1p0HQj6a272bf+7HQvWfAJWvmP8c/U4WZdodevKWn9+6eGQ86vxT4x9FiFccJ+XwLNy3+mCECr37Kv6Q/bQOHqYnO8pkWeK9WvFYGvKdOq/+5qYCG0rbFViCFzFRrvxFoY9sLz//iW+8LmnmZydZvHIEhcv3s8PnnkORYBRlrsvPsjv/u5vE9qMW2trLBw5xvbePqiYQX/IztYOQRBRFJbNjS2ufHiFCxfupNXp8vnf+AZp2uKf/9/+74wOBp6TmYcee4qv/cqvoWzG/u4Wu70B773/vku+VWDdOogUoyxv5pgffj+KPhK2EkzWWmpKRnhissP5U6d44L57uP+eO1lanEHjoG24BI8LVCXJKfM01A4d43vjlASCZSHakcr1X+VFiXX2ylhb045XxrgqnAuUcLTvVdPW8UmOT9ljI4xI7XabTqdDryfZmhpyEgQ1rriGKVvXmGWp4Sleh8bQiOBVVeU2kapmnanGlEZ9YOOhJw3eTsqOpv4+9x6P6xvP8qCxxYhiuE9vZ42VK+/wOgGKEB1GREmLzuQkc7MzLB07ypFjRzl67DiLRxaYnZtjemqKyWNHWUrPcrcPGIyIEI4GQ7Z3dlhfWefWrRus3lxmdeUWu3tbFFlGluV106mfVEo5/pSg4WoXzQ7JroRKocKAQId1w7bvbQgCp9oaxIRhSBxHRGFAFKfoOCYMRSTRa6hESUwUxiKgGMq/QEvFo7SGiU5CORwy6h/UxsD39HhH3FcTQi2QKKlSutKwWxRmnINeNRl18E69p92tOOgN2NzaZnVljbdefxWjNGHkA4Wg6UEAlJLNZm9nnZVbN4lCGB706Ex0UWharRbD/h5VWTB5zwXeevUmhSMHaLfbZKPcwQssX/zSl9nb7wnG3RjyoqAoCrLcsnDuXv7WPzzPl3/1Js888wPeefd9Hnr0KfYOdli9dZ1Rb5dOq8X+/r7TabpNR+anghmXENCORUQFjlZYKhuh1ihraLViuu0205MtJieEdtlaQ5w4Uc9Kmu5F08JKRgVIkrRu7g/CUMQdHXRLBwExIjrYbrVqJzrPGxiXr8CmrZRslBEEIY3TIGxPURRTVYaiMBR5UWexRJDWkKRC61w5JzBJIpdgDQ6NQ63NYSxRJFohxlj5vBYRtUGvh8ly1lduCh5Xa6IkJk1S0jSlMzXB9PQ0ExMTVEqxubHD1sY6ezu7mDKX/pw4oCotyljSKKIsc5eNVw5K5hkbBcalXDKmMpU4nFqT5XmdkVXaQc20b+aWviNMQRTHiDp9JQxsrqplK3FMtJLMleijaJeRDep+Q+Ww46PRkCiKa1sqAmWlc7g1eZYTuopQGAR1pjl3/U5FUdDudDBVJVXjMYbGXxx/8xHHkQTieYkOQqw1hIHzBIwTbNSaKIwb+BkR3jGyFsI4dJl5gY4Os5HQLBtJGhSVIXTQDx1YjIJukkpvqZGm98DbeA0Lc9M8mNxFWRniOGE4GrG5s0+e5ezu9bixvMJ7733AR9eu0esPxG+z1iWlLO1OCxEOrGi1EqnYuGqTxWJNQFlZfvLKq9x//wO02l3CKOSBh+7n+rVrvPzCjwkDmJuf5W//vb9FHIas3LjFZ558mhvLt9i4tUK3k5KkKVlRYG3F/PwCW2sZGsXG2hq7O1vMLR6jP9jn7/6Dv8PB2hovfvfb9FVFriy/+qu/wj/5p/+U6dlZPnjnEu+8/Q6vvfEmIGQg1viEia8ylVhbCoKmDgzGq+ON+944vOPOkK5bWgUVpl0waamJB5Rz0twnBEYu3yNnVGPPXTVC2P6vTv+muRJ/Lh+cNM3c/poPX7KtmSKVY7TyCBD5mPSCYpVzBg0oqXI0dL0+gWrqgMAadxXaohyLmrdbqLHKjKIOOqzxemxNNcB3+tS/j9l1f191LFoHTM311MUZkMALHJOcbYIdKxUa6wIJH4zUAZQd+90XalxwWs8CKbG4axkfY1exqf16l2iuAznfo2RrmLX32TwywGAoyor9/R4/ee11vvr1r4AO+PwXv8iN5VWuXLnOkeOn+Tt/93c5OpPyxhtv88CDD/Knf/RHFFnB/NICBugPhxw9epTLH12nKCs2NzZ5+eWfcO6O86ysrfPYL3+J/8KU/D/+n/8jeZZzx70P8s/+2f+FE0enuf7Re7z1ep8f/+gd9vZ7lJVPpFCjdqyDkCtwLGSaQDWvi+9dkZey78/PLnDx4t3cdf409917gbnJLmEIVSlkKIHWGKMpq1JsmXbP21hakUB4A62JglCkTozBVqbpu7UG7bjqYkXdHyc9sIHbx2RflD1O5rNHi4z+t4Ki+WaqPM8ZDB0do22wm1UlGFZftTEO++kNRVWWTgtHJk1p3AZsRHVXoF3GnUeyXY34lI+km2DIT0ifRbaumlA3QFlb41mbud1sRqpeZApKRZYpBvtrbNyES2+HzqmPCIOQMGnR7k4wMzPLzOwMs4vzLCwtMr+4yOLCAgvzc0zNTHHPkUXuf/xBYWkpK8qq8DzZErE6Q9o4wDR9FXjBxAai5LVI/D9cWa5ONljJL1mLE0OyDtJnHITPumCzoqqE/aYspWJWOEHTsiwZVfK6KUuB3xhpOnPRi1QYAmHKCUKpGtSwKR+guetV7v3KSD9Cfzhkc2uHldU1bly/xfKN66ytrrG/vUs2GJGXA4p86Jimghp6PO6cCXzLojG8+fKP+Opv/C5Xr77DXXfdSRIEJHHEqH9Ad6KLMYZ+bx9h4gpI220Ggz5gefChh1hYWuKgN8BXHws/BnlOPiqEE17HnDx9nqQ7R9Se5vLrr7CzuSZBQyD6QUHoGnTHKjG4nz08S7m+k0CJMQmUMJXFYchkt830VIepiTadiQ5lKc6PCoRVqyxF7DIMAmINURAxyjJCx5TlAyZjIAhC1wMjWWfrGP2whiRt1QYjCKTnJnLc+bjsiwQq0osTOKpgaZQOwRYoJfMpakdYcIG1riFsKgwxpiRNEwch1Y4woKrXpnLZmyCQfqcwFAKBINLEceg2NEte5ORFQRxHoBWmKjnY3aVqp6yvrQlVdKgpkT6zyLGoaSwqCimKDI+brkwOyhKGWgIzUxJFUvnRSkQPPV11GAXC8lYWoHBMZIVAyrQiihPKqqyDkjCKXHCrHK7bUBlLHIXgoJJeTFFpx2SmFNpTyGrtWLEMaTuV/iYjgUzkBF191jMvCkIrDdwiVGxqyJq3amVREIQBcRSNuUu/OH7e0Wq1iCOpXnpIJY5RKYxTqbYYS1UK1BrlRWA1KnQK38ar1YvxiqKYKNTO5lqUY+CLgoAwSWgHToNJa8rC7Q9IpaXVbgGOur0sQZV0uzET00ckuZgXPProXUS/83Vu3trktTfe50/+41+ytrpOFFi+9Ztf45vf/DoocUL6gxH/w//1n7O3WzodP8lYWwPr61u89dabPPL4Z8gVTE92+NrXvsrW1jqjwS7/+J/8Q4IA9jZ3ufjAQyRJl+ee+XcEgQT3S
TJLWRmOHT/KaDRk+dpHtNMErOXlF1/knvsfpj/oYy38zn/191nZ3uDlnzzL4088xj/5p/81SZrwxquv0dvr8exzL9Dr9WTvqnCaMVKNbMUB3/jaVwiCgBvXl1nd2OHKtWVA9gYRL3UBCr6x/nBwb22jk+Ld2Lo/haZfQ4gEKv8O7zE0lWnwPnCdwDX+jJY6YPHMXsb6BKAXqHTZ8TEyCN/v469DI7bQeLkQX3Vx1+ArcoIQc7QH1sp118iIsWt294WRiohylQ+ZuwFKGUeWpuq7sRiscexvyo+TwRoP5XIELmNU2tb6Ktvt/UMuqPBq9YDC1P6Z0s0I+ES4jLNtzuH9ufrbxipFNVRu/OGOP2eaQM73pmkJ1cZ1c5rwTPo/yqIkDBVo1+fmBDNx/u1LL7/Mgw/ex9KRJRYXl/jN3/xNfvT8C/zqb/4654/OcuXDy5y78w7iIOCNN9+Q0NBBqwtjufvue3jtjbfZ2esxGAxYvrHMm2++yYnjx+j3Bjz6mSf4P/6j/4J//8d/xn/73/23HFmY4M3XXuL6lausr63z0ksvYwwOJSVX732B2hdTnoVMEWhLoEErQ6fT4uSJI9x79x3cf+8dHD+6RLedopWBmnQnwLhkiOMcQpeRG1OLVULMowPZd6qyFGkIG5A6xJavtOpQGEytFZ0wa6TiV1alQ4woWq2UIi/GZA5UHcgmScQnPT5VYJNlmWNiCoT1JS/cRHQBiMVtug38qoZgGYuxTmncWsrKOjpogVYYM/b+OgyXwzhmBfxg+iBnLKgRgzCundKUS2X+j2U9nJFoMguyoI2lzu94J1WhyQA1VBzsKdZvBpLNVvJfpSOSuEXaatOdmmBuYYHFhSXmF2aZnJ2m2+3QSiO30QWEOiQMgzpQEAdHYeumOPyKru/BZx0alhefabB1hcpYEZYzbtH53h8cB3zN6u7GQrIP7ud6pG39u9gfZ6jH+kMC56jrMBLcYygZgaIo6A8G7Oxss7mxwcbqBqsrG2xvbbG7u0M2HFCWucuElJKh8YwZtqrZM8pSMuQ+wzD+3A2y4KrhHi8/+wyPPvk0H35wmTvvukucOVOxsLDA7taGfIdzCpNWi/6wII4ivvnNb9AbDFGObrAsKvK8oKxK8qIkyzP6ewcE2YAQRWtilssfvM/qyjWqIqPdbrG1telYw1zVzZ1L4ELBYQ0ZIA410902s1NtJie6tNKYTitFBZqyLGkliQSsIYSBjLHS0piOVcRxRJaNKKxUOnUcYauKJJZqzWiUAYowlEAiDDXWSpCjg6imrayqqslOF5kjE9CuGb7JvFVO06Yoc0ICgjAgQNc0ylXpxTlzwiAiDJ1ugxaCgXa7XTfKC7xK5rtcX+lIDaQC2GonzhlpHBHpLREGHYNhNCjQSjEcjgijCKwEP2hNEGoMFcYooMIasJUljD2kSyo1UvmRa5b+H03uWOJMbokTadQfugp0URS0Wi0Ct07CKJJ1Whg8gUJZCmV14GyLVFRlQw9cEzq2Igg0ZV6glKYoS5JEtH80svGUtiIfZKKbYBRZnhOnCZXDIqOhO9mhyAswgeP5V7TSVKqNpTxfYy1VLoQmef4LuudPeoSR7GdJoF2lU+jShXI0QocpWMtwOKDdbjsnUtXQQK/+XZaV82JBVwGhVhRIBdGTgQixhsxRz/IURmHt2NZ7nRJYdpJEEkhVFVEgc51AtJNMnnF0YYpjX/0sF+88y/e+/xzf/f53uPv8EscWO06PKqG009x//z388EcvYEorGHYHLyorxbPPvsD5c2eZmlskjlvML07zj/7xf0WkK6pqxOCgx9kLdzE9O8+f/8e/5IMPP2BpYZowiJmenqGqDAuLSxRofvi97+L7kT64fIXL777JA49/nrwo0YHh7/+jf8DsH8/wX/7jv0dl4J233ycbDLl0+TKvvv6GCCIXTcUx0BAqw+c+cze/8fXPEIYi/puXht//4+/y5juX2N3Zw5qSkmoM4eUdYqe2oVQdxAhCxutiyH5nXJbfVN7LdQKX7lko/7ppggbvWnjitRpFhgtGvEftv9vCuIaMNMnXYZO7Fi0N6hTOiRe2Nt8H4w+loLLUbGA42Jn0zrjKQx1AjPXGWCtJqtoVMqAq7xbU11LXkrxsg/Mx63M50VAAW8hAeJ9CiAisc0ipx6pxVD3Tqa2v1xhBNFhlHPrGwSyVC5J8NDoWDLoLYpw5DhcsqRpu1zh5tq4EuXH2jfF4Jjs33tagjIPcWUNVOcfeE9EoSSaasqLf6/Ptb/8lf/v3fpe01eLchXN8/ZtfZePmdW5ev87U7BKTk9O89tyPWFvflL5grUjTmDhJufuuu7hw7hxvvfseZVWxv7fLG6++xuMPP0x6PGTUH3Dx/oucufMCjzxwntdffp5bN2+SZznf/d732NzecUXCJnkYxSFJEtFupzViotNuMzs1yX33nhMB3tlJzp89yeRkp24fUECZDVyPr4OJubmUZSK+K3O+cvbDEGjpw7aV22eVIsszhPY6cGLHUU0+VBnp06/Ksg7eRS4CFxM4X8L5q6YsfSw/5qf+/OPTAbFdlhFszQzkpq172KbOinjMHjhygLKpUnjWJAs1T70sfLeAja2Dpdt7NJp+myag8UGL0NSNlTLGPjd+1DmYQ+8ZfwW3EBz9oL99H3BYaHpfFPlI0TtQbKwrrn4QOJFEEQNMkphWJ6XbmaDd6tButWi1WiRJSNpKaaUxSRITxAlhFBNHEUEUEwQhURhL4KBU3RiuA6kkSa+NwNZqK+SqArdVuN39NRmc+jZc8GNs1YxtZV2zt0CayqKgKCsRMM1LRqMRw8GAXm/EoNdjMOixf9Bn2D8QWGKWuzlQ1YZdgrNKFLhhjHmjeX18roQ1taomz7NDz8kYWWor1y7xwfwxThw/ygeXL6N0xYnjxzl79gyvvfZ6vQi00rTilIO9NR586EGOnTxDr58RBDJwVVHVfV1lZdjd3mV9bZ0jCwv07STXr7/LzWsfkfUPaMUJw+FAGsQdi5sPYIIwIHREEe04JIlDJic7LM5OMz3ZFUVujKuGSLCZ5TlxHBOEEVYpIiU7VZzEDIcj18AeSQm4NASRJklSjJUeC4OtGfqiKHLQMMm05K5s6+nBxUlKpKQ7ymi1Ejz1qPRqSLnXkwEYU6HQlKVUTqrK1qxqUtkxdaXWfybPjftuwfZ6YxbHEaAYjaTfJ05iF+AadKDJsrxmVhPYnRCIRDYgy10DpFt2vYMDCSwiCRa0C6qiMMIYXxWRcyot68YYUXcvyoIg1A5WJ+QGo1EhucXSazMAOPppp49XmRKlhdbd369nN/N9Lv6eq6oSIgzrgx35XqmsqLrq7Tdt6TfSxGFIVVq/jAFDFHudGt2Qkhip+ihUXTny8ChrrRtvsSm/OD7ZcbC3zygKhRnPQQE9o1AQRK7ZXCBpg77oVIWO3lskCxzm3E1S//k4jOoelzCMMNZrLLlqvcsYay2OQTkOw0DgWKHLcuZ5gdIQhdLnJv1aUi2O44iZbsiv/vKTXLv6PmdOnWR3Z5coCsBY8qrgy597nBdffJVB
mTkEhaGkgAJ2d/r85V9+l9/+7d9CK83szBSTE5Nc/fADysKwsLBE2u6yvrrBn//5X7Cz22PvYJ9uEvHEZ56kqiomJic5GXfp9Q37e7fo9ftYW/LM93/AhTvvhiBl1B8Sh5Zvfeub2MqwcuMWVVmyubnJn3372xSFwLWr0lcdpGezk2iefvIRgc3k4lckseIf/N1fY2vvgOs3VukNRkRhxHAwcBUfsetpmjIxMUkYRmzvbMszw/U36oAoCFFoikKgyoJACERfQylaaQtjDIPhwAWblfTeuUpyZWWdxklEf5jRbrVQSlHkBXleShDpzpuNRqRJizhOKUtJqFWV7zEWWxEoR7wSaNf3JyiPKI3JXGUvThMhkgCoKkKt6yrPaDSUjH1ZUJWl26cs1ihaSYskjmU/sYYoVBhbMSoyOq2EsjICsy2l+mhdAnTQH2CsY6hSiv4gc3bUVebDWOx1JHY+y0YMR0NaaYsolF5LpSBNhXCjcLbTM00aI9AuqzStVopStu5vjKOQLMsEih0EdS9hvz8gTdu0W21GWSb0+mnMoN/n5q0VXvrJGxSFb1FwgZWt40wXeCqiOCJ0ZDeihSP+nuxNzt9QAV77x1MPSEIZbGX44PKHvPDC83z26ac5f/48/d0NVm+uEqVt0rSFyYesrNzk2LEj7PVHxGlIqxUSxQlJp8Xf+8/+DoPeFlpboTv//7P3J822ZNl5IPbt1t3POfe9FxEZmRmRDQs9wAbFBgCBokjQWFUsiRTNJDMNS1YmyWQyTaSZpJkm0g/QQKP6CZKZrEoUJbJIiBQICmBTAIguASQykX1G+95tjrvvXoNv7X3OfRmZiBjQShKep2VEvPvu6fy4773W+rpvfhv/9b/5V3jjtf8YOSeEsEOXjH/xz34J3lnkkvHrv/5f4zd+63eRq0KuGaXIfmM0/s7f/lv4O//RL8CaOgT31jnMzgqKTDqnsRoxZzjrEUOEcQ7GdxZGgbJK2EMWKopupkLqUcokBjggtXdDpVmH1EWxRIQQZN+rMFpRdw42RaoptApMs2dmIdhk5sprxoi7a33EuvqTj0/U2EgLMTJr6LIhjUzuHuhKqC1tTH+b7NYXZ7QLGgD0SYq8hggur53ULmYBjzUzLzcsvZ7/fr9zbXZwLWSnyOlxQ/QI9xnPIaI6aYKuYdKOiPTJm1IKiA3rCjz/sDcWl/cBddGtECK0MNYQ0rOWk39rqYEwBtZ6JsqbrrUx0H2D7DCw/JPTEHGWEMeW2n3JW7dCrIKUVdSWQUMHIibUaCQiQEIPJGXmYlwwPk2fZvVXz4WIt3CJR8P4UgPz8pf2qHkpnG4Zq7/n7wAiOkY1fOnXfxnL6W/jjSdv4P6Db+A/+IVfwG9/6Q9w9+KWepNaLvTJUPB3/7t/F+ctACDNrzW68JVSkGPGt7/7Dr70B78PDYv7WPDOd7+Db339y/jgu9/A5D1Kzdj3Fd7bIeA32kIBmCeL158c8drNgtefPoFfPOlL1qJBobQy0C6lKLh/7XRCjLT7RaWeZgs7tNKYPKcjPbzSCofVe49WMlJJOB6Oo6nR2oyNpjtzkQ5nxV7ZjaHANJFapQR11LqwqJNVf1vDgH37dNIai20PsMYilgDvHYyxaDUDtaEqPkcXwHdx+/F4FB2eTMPERnUSF7xaq7iwcUobY8LkPUIvFPt3DiAlutEpzaZOaYOSK4wBck5QijxdpTX2fYc3k6w1pHAq3alGCs4r5FQ5hRMu8rpvtPe0FIv2Ajau2whBbaDInLRarlNdC9YpZNQQ1tFwdIetrusZKd2yydbaYJSG0kLb1fw8OVEMqhr1UynlR81ra7Qy79Lo3mx1F8lXx8c7YoywVmPbA1Sr8I7FGJRinoemfalRZtyHIUQ0AN5boZsVWkQDKPLYUOKgjOZSpTBiPkOtleYQkObEOcBYhBhZ+MlaESNFtyPUGoqvXfuEn8VHShnWNPyP/of/A8zzjFoKziHwWlPA5z/7Bv7iT/95/Mqv/ms+X8YQ2idt8eUvfwO/8sv/HH/9F/9bOE2fwje/+mXoVrEsR0BZ5FTwX/5f/+/M4UGDaUDQBZ/+7FuopeDmZsLNzQEwDnd3Z5QMlKrwG7/12/ij3/89vP35H0bMETkn1NJw++E9lNa4fXGLv/8P/h/44P3nSKkgRUFrxPa3tYo//xf+LA6HI+7uHgAFLNPMwjdntHjGFz/zBM69wb2wQei5LISaIGjWGOCLRJdqA5TRsMYxS0NqmVK4z9VSMM8zYtoli6oAymKPEd0Zcp4sRlCo5j3NBpTDKt7XEIUIv+OcM3LKyPJdGqER55wB1SQ7hntslsbKNOq8jNFw3lAv6y26WROdG6lV7HsegQ0248ooDo9zxTSRIthDfAFgD1HWLUsdFhqgaTpUkkQCVNDdTYZE8zTxXEl4eC0NubKJYrhtgTPc97SiKYs2omeUoW8pHBYRFWPDprWBUkb01dxPQkziCmoHLR8AYooX4bmgCtpQM7WHgr/wZ38c//iXfgV/+JWvIaU2mrRec3ZcqtSKw7QMwytCUwwIrVWhKjacnRWvANqmiwU7ZWsN//Sf/jKmeYa3Cuv9GU1raGVQU8T9uuKv//Wfw8//wl/Bw8OKPVWExuBmNy944803cbA7jM44r2f80R/8Hp7vHj/95/48bk7HYdR0nwIOy4zf/70v4R/8o3+MPVXkWoZBlYLCG689w1/7+X8fi6cDIASdQkuouSIWXufGKDgYhD0iaaGLGUMmRAOs6EFDCKglMHYjBHH8JFpnJOqA1xT1VCVnNFOhysX9mHvSRV/eg1FpPMX7qddBTWpTANLcKwkbp8704x6fqLHpm6kxRoouyVgBRiL6y80HHwe5wQSu782N6pv7pelpMr14+fio5+0/v3bfun4uXOoivk91ecyf9PiXP/fjH1xQostN0kWDV695eZWrj9TFabxZSr7G2AaIjOFsNrIJXrIUFmzumrZ3gZ7Hs9Dd56XHDOcyaYouLmo8ur6nLwZKoPTuWU8o+uV2CiIGYyHWz8pohGRRuW4o+/nvxWE3SBjamgxOE/IFtWmSEOzsBKMafuOf/1f4cz/3t/DjP/oz+Kf/4P/EYMplhrGv4Xx/h8997vOoNeNnfuYv4bU3Po1tC+Icx+8k54gXL27xta99HX/4la/izTffxJPXXsc3//iP8K2vfQXvf+cbcFbjeJhxe3eHeZ7ZWCrgMHu88eSEJzdHPHlygHcWzlnM8yITVo1WM6yz4vDHIjbGiGVesG0blJLmVhaMZV7QakVRPAfLPKGBZgvdRlgpDe8mmc5PAISrn7uVcRvai5QiDodFqFUzAIyAzSoTYaChZ9Vs2z7+TD2MkcaYgvt58XRlylmaIoVpZp7PsszjO+wDja5zIj0OF2RDkFVrrSBmLA7neRY3O4UYmZszTRNSDFANcBOpKNPkpJlwI0MmpYJ59tLYERmDbK59apQzC8VwToJ+cYoUY4R3fkzh+/W+b7vQ7wjfuI5WKRoJ9PWLxYxoDkXH1I1QOoWvU22dOJb1AVApFVazCXVeIecIpTyOpxk
5V5zPZ5RKNEYpcToUYXq3kjaGQXH9PnxlHvDxj26+oBWHTKWR898ApJxJ+6oNFaSsKqMRcwIakKOYN5QE1fSgaBbZJ412SDkCqKBpBnnoAJBiGghRThkpJ9EsaJSYkLQe1yEAhJgABK6Bgq7W2nB/90CXQ01r+PP5PBrb0lj8NBj8wl/9i/jN3/odPDxsXEe0oD85QRngV//Vb2CaPP7yX2HRaTSRy4aKr3/z23j3gw85fNlWAA3GeTx99gwlk+a7HA94+sYz3N19S6ihDcqd8Ed//G289qm30EDNQs0Ft/E5Yi74L/7Lv48//to3UDI1YrV2Ohn3oJvTAW+9/RZ++/e+Aq0VYgqYJw6ZWKBrCaUUtN+akdmy7jsZEFJ7KM39JjfRsageBUDr975vllJgrEFnN1xc53i/l1JgnR1DCKAKPVSjloxlWVgwygA4FzampLMaqXE4rOrIIBSt3zk4uWRm6e7cJgO91hpS5vomBtFQmo1xTCwUJ+9QS0OsFV2OaxSG5qJnhxhjBD2hk1VnBgCslZQGVGPuCBRQJLRHsWMctCpAi0tfBRr3s1xIuU0pjzpANQ4ttRFDa6WQC/NurKVesQqswoEAB740x8nU7LYKYzyc5RvMMbHxaJBhbsMaEpZpwX/yH/41/NRP/gh+/d/+Pr7+je8gZwk3xaXEbKXhvK6jSW3oeiSx41d0w4Rq0qTxu+z1bm1EuNQO/KN/+F9BoeGnfuonmNHSKlIMeOe9DxG3M4wGQnjAHive/NyPoLYG6x289vjD3/4uvvGNP8aXvvzH+PwP/0X89/7m38BX/vBL8O5tYWVwkPY7v/WH+L/8/f8bbh9oB089NMag/Bd+7t9HjWd8+zsv4J1FbY1NZqvimEgWRS2FQ8cUBxgwe+bfKW2QYhxAAR1SBW0pdA1lXh3z6G6OJ8ofAKQQMHmPlAOps63C+onyFFS0KiYC2j4yNyi1oAiFkfqxBlUq90KtkPeCmraPv6Z/si3guvBnh9y1Htcb97V9aS9iu981TxbdOrSIZ7vw9lFDMTpxFhm4Qkqum5zhSIULWjNec6ApsnbgckG/jNp81Oe8vJVr1OfSjF399vf83vfQ365QHzaD3eXjYjn5ke9Dfuf6Dbz8qxe63BVm9FKzNB7bLn/3eIrx0Z+9P0N3BPko569u6TusLJuG05yeXNMH+zS5F10vT5Z7c/Oo0dSK+qfaINHNQ1NktMHsFX7vX/0Szrfv42/8J/99fPOrv4v7h3sYc4M33nwLr7/2DNv5AT/3c38Vt/cR1tBIIYSAh4cz3n3nXXzlq1+G1gY/8uM/gf2840u//Rv41tf+GGGjy9qzZzdoKeHmMMM7i9Nhwluffh2vPz2JTzuFtPM8s5AozHmx3VJZG3jHSX+MAfPsEXNCThmHwzw+a0wJZS+Yplkc78xoALRszkYbGHFnKlV836FgjUJJnNI474mwrDtOx4NwtdtAdDqVyhiDfd/H9whQwDdN09DCdKcu6qzA3JvCYss5hRgS/OSlaeDzJ+HMWuuG5bORLJ3D4TAamWmaBtLhvUeMcaAdfiI/OKUoYn4FP3tuTnLfppRhbXdKpCYpBJoSOEc9TwhZaIf9Gkuwlmga7WAbtKVJASFyGiNYy2mgnyxKqXB+glF0p2u1Yp5nbmiKdL1Oe8lif44uQpejyUZ9Oc8GQEVK/PzaGqHlBVmjMmIIUOC52fedQcO1oZWCmDNm52EkLX4Pex+ZYN12ceB6dXycY98CrKVNcowBWtwFnfdQUKL5KFJsGrjJccIr14KWdStLXlNtDSFGWOtQChEBqEbaq2j65nkixUTrMWiqjRScnFjIxJyZB1EbGpiXU2pFThFo3c0xY9/TWHe2sCOGgFoaSk4orWHPGQ0KMTW88cbrWNfvslASl07I/qOVxj/75/8Se0j4+Z//WWin0IQadnM64j/7H/9n+Mof/DG+/OUv46tf+QN4pzAviwwiKoxqeOuzX8B773wTP/njP4a/89/5j/F3/u5/Gy+ev8C//NVfxfE4D8rMixe3+If/+Jfw9W98CylExLij1Pxof4aiY+Kv/+ZvE0nVbBybuDUaS4Ss1SboAkt9b6lZIklBwVk2iNoYmq84xxJWKyKiAACa7NAcQsOAaB0LOw5ljNaDxQAF0YGy2O5ruDGC3tTGLL5aJejRojWhjbYK74hOzNOEhsL9TJmx/jBvpsnPxWBCCBN6DDk7a4RFbcsy7DYUjBfyhsjukHUKCsN4YOwFKQNK07JcnK2yfHatifhAaMcU+UsNUxW0UaO4Gn+vGkpmVo01BlUBTZEJYq9QayIeNIfosgIta3VIkfrGdrH1Vk0hFWozyGSg61YuhYioDM5y43cZYkIuFXtoHFq1eMnPEaZJu66crmqrvpaS1UH2QE7l4gRbu5ieYZsxsdr8R//wn+D2xS1+4ed+BhUZW274s3/xLyOGhNvbF9h33p+H5cDmEIB2Ez4IE177wk/jf/b3/lP82I/9KA7LjDee3uArX/4DDilbxu/+3pfwT/6f/xS3DxtSqsOYhJwEhc9/9tN46/Wn+MYffw3KQAyNeA0YZeCNaLoUnU7fffc9WKMlp87iXoavRTjYzhrkVLHME4dxrSCGAO8ttAb2nQO3yU+whtEgAOmy1LCBSItcM1rT2KCphlQ5HJyWA+8bVGgNMe8pMFpdGrKSoVSDbw8fe03/RI2NtXa4xnSaWV+JLhu2HnQ0Frz6oom5uhEBQnhVprOdrz8agI6y1Iv/uB5NxaUxuVgcqguKIYVPfylZIzEobwPF6C3PpZj/KFToZRTno1Cdl3/+/R/Trl5WkB/5z496Dy/Tt641P+N3XkKdLs2NNDq9+RsqR3lp1VGQ/vuXAlBdTs0j1KVTk7r4sqqev3H5fAOdAalUKYtVrhhDDI3Q5Z2O64fOaHZ8t2yUSR/qMDt0Q0qRMKb3eHI64Ftf/rf48J138Rd+5q/jP/iPfgRf/f1/i5IS3vn2t/HstRN+90tfhjYs0vd9x93tLR4eHnA4nfDFH/txxFTx1T/+Gr779a/i7sP3oEBx99NnR/zoFz+D156cLu9ftDLWGBjHyf08TYTKm8Y8sVnRCqMQn6ZJJlCcgnoPzDMLe+qX6IDm/TRMNUJIsALHOuvgjeNiLl/MMs8svCIX+XkSTVZTKDFfMmpkc+hUJt5D1KEAIBdfrqOur5mmCdwwtdDXZmzbCmsdRL8PALDO4/5ug3MGpTTMMylT0+ShlIExLDhSzFCa+p7edFy0IXU0WEAb9pC9QeJrUaPTP8u+B2htYa1HSju665+1PXeGi34tDUWoc7R5Jud86nQKcY/TmrbSKUZAwvP6+eN0KiOKyck0TVi3jdQ20YKVQt53dwPqGRrLssj5mAbiEkIYjasW3vpFq6SQcxWaXz8XouMypHqknAVdqix+YkWKkYWeNNTb/vGtMf+0H+f1DGftuDfCeUNDg9ZEE0OMXHcbG/xNhLStcoqbReC6hV2KIeoS5mmGApAUQz5Trng4b0iRusV1W1HFqKOIdjCXjD0ExFSwb1GGOA17iAgx4/
7+jLAJNSQxRLdUumet2wqlhLLR1LAfT7UjuQohMUyWGkrqN7QGSlEIUaHB4Vd/7V/j+fPn+Ju/+Nfx5OlT1Ab86I/8OA6np/iRH/4z+Gvnn2Ujt29Y7z+EVkCKEdGu+Ht/72/jf/O/+p/jh774No6nGc47fO6tz+Db3/wm3nv3HdTW8Edf+WP8s1/+Zbx48YB9iwi7UNR6Ma3o2FWKxnvv3+L9D14Ms5ZRcl7N47owX6ONAYwCkTc0WuN3o4ZyPfisdIhqDbI/dVSkO7FKyag1nQzRhoQXALrBACubSivt2lFicYQSLSKURieRCOdBNB9X9UIlncfoS16gMoLXdFOy3oRqQdqFHdHPTJP9sik6pvW6S0nd1Iebl3K+synkPct7aMORjSgPG6K+F2OYFnTXWqW6HpmUMsjwO1ehzfb3KDVJlVwc1dkv9VLDXM1k0bUb0Jc6plubF0FBWxVpdWtohfcLJQQ0WKALbw9/7OYMl+fTiuYcrG/AZld18wJhANWKppllQ/McNr4aF114i/yS/vmv/Cq+9e1v42/9zV/E25/7Aul7y4R5/iy0UTgtR3zja18DasW+biil4uf+6s/i85/7HFKKeP7hB7iDwtMnJzx7eoN33n0H/+Jf/Bp+63d+D1ug1pmoC2sLoxRmb/DvvfUanr/7TVK3nGFj4QyMomVFlL3EaIOgyZrKqqFaI0MBXhNaKVSlUMGhQC4KTSsouVlKtYg5SzAqUGoAlELRgkg3+U5qGTl9JWfopjnwU01qPOA9+Y6dUbzItYEWF1kn7rA0Mspo+SqP6k84PlFjY7SENArM1lGVzgEev2copA4hoBfWWqYdAIVHtXKRMNaw+SjdWKA8mu73o3adB889fyYWcZ1H/HIzofsN1boo99pR4xqt+T5wyUvHy40Ln/r7ozQf1dz8oL8bDdn3QYuuf/by619ocf3TXZqG8Tsv/c1otGQRl5OEq1/uL4jejRG9gUw82DUqpaCrlqkfgNYbm4ZUCKHWJJAuPy0bA2svG1k3jmgX5KZ/zkvTK+ewNgnxitAJ2DYFay3W22/jV3/pv8Dx2Zv40Z/683j782/jh3/qAarueOdduoc4Z3A6nvDZL/x7UNbg9u4BX/3q1/HBd76BD999By0HTg6sxzw7/MQX38Lbn/kUP4MU/jln+MnLpIm6kHXdYJ3FYTmIvqQiRLrAPTk+JVe18rq33kPJYOAaRXGWOqt5nmCtxYsXdzASbhnFAtFoYD4ckGISu1hO+p33qIVIqLUWW2RmxbDHVHogMT0Q1JhpuHtdgnSbFONJwjfpmFZrxvF4EKqaFp4uF1DnDO1ZK4WOXWeS8w4owJSClCq0boPK1zU4NCUw8vpVoHJaki/LjJTS+N6tZSOTU2JmVsryfiDp8N31iNdrz55h82HE6lwoaLUNF7kePJqzBB+WCqXEEUvQLTZcnFQR/aqw6tJsDYcra7HvO/Yty7mldW13KUup677UMGBhg8Xpn9YKfvJQSsPPM1KMqIUObZDCr69hMZJGQFMH0moqSHnJrzQ2H/tIKcNZg23fSOeslehhjLTs1jQDIe++IKWCu9tbOGdRUoZ2FsZkUkhLpbNWq9i3VVBMhegStn1nA5AiPrx9DoAoBM4bbu8fkAtd+WJMCDni7sUZ+74LMgjsIVG3IUhATInIEZTYkBcpKFjoF9k/SaNk8VdbZ0sQFeg21gw7lGITFl/6g6/gnXffw8/97F/BX/pLfxHpfIv7lGCmCc+eHjHNC84vKh5uORhY7+9QS8FP/OQX8OTpgnfeewflW4HC7pygUPD8xXP86r/81/jyl7+KbQ/Y5f/UerLR0JqoSy0NOff64drURyaVff459oU+xMNoIPp21od0DV3fpiS0uuHaZVQpNdAzWTWldhDUYOzF7BL6kPXyaq3vpqC5kPyk8c/Xz9z3aimxx88u9LceQnwZJrbrhzaFLmjn38m+LM/Xrav7WjjOQ38NBelOVO+2ZPjbP4GI5nvwD67mnvJatWuMVP9cl/M7rOL6IBeX+qRdPUdvDK+H0mi9dmuX1710RuMPV7JljGqnCYIv9QnPmdSd3d4Zl+dSit+t6WhGPxGQIb08N1lFDcx2E9F8pV6LvR6ttvd9hysOX/nq1/Hdd/7P+Es//RfwMz/zs3jzM5/GPC+kcR2OaDWPPEezO1hr8eH772LfNrq07itun3+IX//N38Sv/et/gxcv7hFilr0xib6T9EljNN584yke7u/wlfsPAYCmCAowXsNbw+BuRRrz5BgyrSUQXGmNZZ5Y14s+SmveC8YoEmV6MwLu0VqpgbQ1aEwSam1NRmsFWtth1mA183OAipormtJwpcJojVQboDVKYEOsrQWyZN+Jbj2XDWiV1vcf8/hkGhtAOPcNCmY4LuSchXPO3+sUI9c3WLnY+jS4ZkjBamQioFFUhq480TnzhGgpHBsIW7YiKI3c4VVXgbYJVbLA6EKvsfKhlwN94erCet6QHRq9ajIuyyIuZcTlKR+dk5eaFb7kR/zi9/n590N/Xv77H9QUvfza349eJ+/io/97PKRT5B43St2isvvOv/wcHYVR+nFjdmlKxsgJLAzTKGQhEypV6QoDdPcgI79L4aIxE5GaK5g8xiDuQSwGj4sG4nP8+j//h/h1M2E5PcXT197AzWufwnJYkFDwzvMPsX/zm9jWO6z3d1gfXqCEHV3Y1ppGbQVvffpTeHpaEPYd2ho471AqC+7aGvaUcJxmVAu89toBTfQUShogf7qBNcx00poZKVpBdBYGIWbM00Ttiuf7h2JBErYdh3nCskzY1h3z5KBA95t93TgraQyVdJPB9nDmVFMTJfLeQenG4qfw/VhjsUwTthAECeP566gAkRlPOo42o/noTUIP2nTOwloljnX9e2ZoJKdJBYfDjH1POBwnhD1iWQ5suLaVoV9XWTchRDqDeep35nkeGh3vPbZtk4aSxXqtbawrXWPWtGRbgQvwJS2eLnvruoJho7IG1SZuVVY2Ngg9zqO1CmsnhNA5/7zfUkpwzovGZwbXnCKUI2bKPDyc6T7nvNBR2Cx5MMx033dxpKNuiu52dIgppcI5jdYUYopQjeL0fSdK01cjay3iTntoZWhbbbxDqpehUCmXodCr4084ZM1kLhM1Aa+//hqn6rgsdylENDTMfkGnQ2VBNMO+w4hdek6ZKIEySHtAVUDYd2xhl4EA3e60NlgfNtw9PODufMbt3T0eztJcNdqej2yyXBEiKZwKEKSmorSeUwZ0ClQd/7+4lPJjNikU2tj/ur6xtYZmAV5hBa1N+PDDe/zjX/p/4bd+5/fws3/5p/Fnf/In8eTZU+zO4/TsNcR9RS0RORasJSLuK/xyQMsJcduRQsDD/T2+8c2v4zd+67fx5T/6Cu7vN+whIOwBYY/iYCb0UkFwc8pIqbMrJPOlX/2y118OaQOEl981FPKJ5UPKAA51rGeQwvlyHmSwJoYF19sfm598ed2rxkqNJue6+gaUEntn1dA1MhcTZWl0Rg1Sx4uNhqEPDhugtKxxo2OTJqt1jc91g3d5Fz3gk
L/Qi/zWL3n5LzqBKdVto/v1LmQ+MQhgf3DR214+aBUE6NpBVlgCeLl2qZfSYnxnvQF6vF5VaVavB548P6O7Gp+jN0hF3nXvZRj8iavzrMb/hu6nPwl67YSBHnXaGlEovs1aFVRH36TJqlJ3ak51EZMgrbnh//2r/wq/+W9/G3/mz3wRf+6nfhKf//xbuH/+GsKIFdhgDcPey/6Au7s7fPtb38Yf/MEf4itf+So+vLtHiAUpZbFLvpjW9G5VW4tcgdvzDivaQLXRVIfNBps6urZusJrUddYEHE7eWT5WGwVrHFql859SDVYbKM2MvVYbnNODGkrdlkGMbkwSrNVwzqOhIaQKaz2MMXDWCT2zQBsCJdBkpGRZ56wTsEI1WA20ksW2XiHFH1TXPj4+IRWNsNDQSWgM2krK+UIV04C2RFyc0Sh9A68V1hW0SnvAPrFWGqT0tIpaNbTO40LrHMdSK6qiY0mffZRKfUGtFU0DWvdsm4uO4zGiIctKw+Ogp3ZZDNr44UVLdHUvjhtsLFXyZY5pAx43O9d/vqbaXaa8j3/voxqTj2pUvr8G6OM0NU0W+o9qhq6bpu91kHs0eRmfRc47MlD6BOfqVVt79Fz9eWrNqDUPxIIQuqBCoAWqlbBITksueTpoQM183iywrLUOYdtgvcMyG2YbPLyL9+++i/e/Tih1miYWfjljPZ/53QoXGQ1C5QJUrXCg68zhsEhGjYY1C0YSdCXEO08ezjsYbehApg1yq5J4z+Cq6TSh5ARrDLnxMcHLzY8m4bS1wToNozTmRSxBQwKUhhUaHWqFMQ7GqhHCVauiLkdr7GEDtMK2rrTI9AzTypkTZXj67/vJo6kLv9wYIg050wqZdscdraF96bbRujnGLBuOEepWk8aowHs6lcWYRCjNQo7ISxWNCydepAhEImB+xvm8YpmXQe1prWHfRZwdSS3Tul2oYyLQj4nZLc5xUNKpXmyqC3KJcI7e/ufzebjIzfOEbduGy1i3nE4pC72HhaFzRF56To8xRl5D43A44HxehQan5FzSJEUrOtXNE8M3+3uuYhGslELK1GbR6pdQfkwJ2ljkFFGrwuFAS9PegJZakRqpMbpR71OEYukc7cGnefoB68Cr4/pwXqivoIA2Zdrad4c9bfr1yg02JDaVKRdY59BSgjUWSYwEvPO4e3GLWsn/t96haoPJL4gpYw87QqC4/L333sf9uuG80tK2VCBmagZSykCjqL7JPhdTggZF1935i8WOTKohHPna9ziuLxUs2GTZQpNCV2lmRpRa0RJ1N6bS3rZkA5sNvvmtd/HeO/8Uv/Irv4YvfOEz+NEf/iG89dm3uJYaA6MtslLY0KDunuO7Xy/47nffwze+9U18/RvfxPsffIB9px5o2zfse0TJZdB0uTdwV8mlIOVrHUTfe19CLPq/hFd1YRFgfOaBAqhLs9MpU/0Z+l7VNW9oFLUDGFEKbYRKNiIc+vIakPy5Ptl//E+ui+wn+psXNb9qV3NDffWIBtWkilYfXU9cV+sdFVLo9QM/Yy90rvIw5SnVOJ9cZKXJ602TurQnw7bg6kmuWR69MW7Xz89PffkePvJ4uUb5qN+7NEfjzHQ06LoWkc/cm8Z2hTBd3q/UWb3r46Z/qf1UN4641gRfTKjoIMpsl1qAhiwh2E0YQwyfb5LXBrH4Z6acRYwZ6/oH+P3f/0PMk8OTJyfc3JywLAuDKhuwrRsezmxswh44ICyV+7GEplcZEPcaqmtTgYYXtyu22cFIxIGRi1crBWcbm5ImYdaNVDpvG7Qq0LrA6gxntVA5MyZLpMUZDWMLlG7QqsJpxbpbNRjdYC1gTUNKFblSL2VNhVIXWnbANgypnMSXNEU6ojYazRIV10qjZEVHOWuh/YScIh+jDVL7+CyET9TYvP3mM5TScH/eUZQIusVyT0szoA1dQLS+UNCC5NIopeAdv4haiOrQxzxDawegjYuhU9LGpLgUQDeUKy3ONUe/T6iALia+NBFdoN6nGrU16NLGAsGf80K4WLF+v5vy6sYWuLn/9HpaIvfOcM/ox8uUs++HvHzPq35E8/L90JkfpAH6qM/2cRqpH4Q2XTdF30uRu/7dl59bvo/KyXdfULpOSyn67BulpeElpYiLRRpuWOibfgzkdSZSSIxM33qSsdUWRqZzYV8HNY7heVyoSyk4eIcfevt1/NAXPo1lnjFNDsowbGpM9B2tKPtmRC1KEZtOLZkp5IXHmEaQ5nFZ4KyDnWesW4CBhZsclMrIqaEWyCRnxTLPsHaS8NJLQ1lrBQqQhNNLBIjDgslPmOeZ1oyNAseYRL/jLO4fNnirxcKUz1dKhXcKANGCeT6IixuRspwzrDhxKaWGsxhASozRTP62luem6/BipBB6mj3WdcU8z/L4PL7Lfd8FJeouZZwgLwud5VIqog2ysG7iBuRkwteIIC+LGYVNz3bh++DnYdPiheLoBGEpQhMyoq1gw9SDRSd9MSCgMcCELI5BPSum1oLz+QxAiRED84FIc9PY1h3WWaS0i/ucG/cGf8fSunV8rw3n8wZAQ1UmzLdW0faA0/EIrYEYRbszU7PToNByg7NuWHyXwsL81fHxjnnysOYlq3A/iQU5ECSTY/EzA1m1GsYBWyAVkWYbCTkloaPyO+2CXa2AXJhFQ61V14EV7NuOCoVcgZbzFZ2t71kapSVoKdiyCG8VGjUMAKjbqJyqNyIXuqMZYIlRZb8zMpWusnZCC91EaGmtthGsV6pDLg3VKYQPH/Dh3Rm/83tfhbUW80RE2YkTYioZIUTESJewIgLuUqit27bAYY9c/92prO8IRdzJrO5NQV+TIAUrpPCmHS/338u62HdfJUnlY0Am65ySYUwDxLWqXe1PQGdvGFwGn1CjLOZri96kI9yqMz7GP+Wd9GHZeCxGI0Aj5t7MXFCLKvlmQitBf2VaM9erep21S5NOQnW0SF9CJzRIR6sDSRHb3z7Mks/bWpHPMH4o54t1jh5NAegWiWtETPZ7pa5+NubCo28a9Q4udRJPp8bjuubRM/RXkOaFlM4RNH55Njau7SI3eHQ99OBznnyx5u6NbH8aoq8XvZDULYqvr7TcL7lJ06jQTbKuh7bdxEb3jEPVkHJEzkoGYhYhJDycI/Q7zwF1qZNbawO5LCkj5iQ1cJEmq19XkMcZzPNMy/hWJXeIup8eLaGNgQbrBCcGGHumgYNpDbHwc1tj4KzFWprcjw2Ts3BOw8EDBSgtY/EexgBHYeeYpjEZBV0gDnmANk3uTemToWAtbc2NUmg1y2sYtFJRKsjEUqyVK4CqNK87a6EbkDXDVFv6d0RFe/3pDQ4zA6i2EPDh7T3uHzZUYFBeauuZDCLA6rZ4UoB1Z5Gs8mX6DnLESZnJo6Hpk1kAj0wLBnTe2oDlWm2PGpjrJud70A2l0crVdEsumN7gXKMq34P4NC5aChrXDcRoZK4QjTEeeKlPuDZKeKypubRF/e++Hwr0UT/7KFrc9efuP/s4jczLj73+9+PHajyCpr/nOS+L5MsLGX+v/9elobwWliulUBRpi6ZWaE1H
LjptsYjri0trPcujQA2eJs9/P+dhO7MAEYpRXxhTDDhMHj/0+Tfwkz/yeZyOM4x1QwfkrYPynOrnyPc3eydTUn6mqhQOyyLC9Im6G01LzBgjDocDbTkjg+aWaeJkJmfmWMhEyloFN3Gy0l1jconQyuJ4PCCEHaVkHA4LqPFh8KVzHilvSJnUsZRpW0pTAgqjvTQoEGeZsEcs84xtC1BaS+PGhZwcfQ1rrPx3G85QndZZVMU8L8glXz2G35u1BiUD8zRh3wOs9ag1oG8EYQ9j2EAbSSONRMG60tpxnj2SuD/lHCVklMOOkIKgL+BnFXFjNzxQCpjnWVAaK3+esJ5XGGmOW2sI4QGn01EQETF1qKTxKUXE5GJMoGEtXcqY60D7y47kcOBCZMc6O+h7ENeXlOqwhi45jxC5eZ4k+FSSmStFoUXoZXsIUCDNUCkFZUXsrDRKKrSL1tw4WPf84Hv61XE5XMuYtII1FTGseP0g9qSuYgsBGjvmCsS04+lxgbMK67Zx6rmwgCkF2PbCIZ1KeP11g5jjsBJm6B2RlcO8wjmDPUR87uaId98L+ODFGbf3EdVWZF+QEx+TKpBcQ65AKhWT4TJRakE1dey5FQ2taqGk8bqEIDW1srCtjUVrb5qK/Kw2UqlIRRI9QWJjog0dvaIhXdRaZqlYw8/74m7jtaaFPdGquLdlxCST51jYrLUKtCJ05ovR0PU+qzWLcnVNgWpsU8Ym2yB5NSyWgEuRPK773vA0AVmUukJCuK6ZgXRoCcnEKNIHEiH1C3+vWwVL2WZ6tYnHKEIDoKygHV1cLkPOJqQteag2LLpra9Cid7kwKa6Qj4E2XIppiFtYqzwhQ/PK1g2mMROm067QuO5DsTGmJTeNeVRHkBqbF7R21ezJJ9OX5qy/z9pYdCq83CT23KWOQPWPIA3n+N7753mMPhmhcCtoWc+v79jHbVQHm/qF0fd15iC1qyYFjwasfcCkJX/KaDvy966vvYYGbc24Ri8dpmQiSeOt5fWKaOKgiPSQfk59TP+OgEtTM5rAMZwXMwrpDKk7avJ8rEH8NIl2FGiCIqr+GTXBBiPXMfeNJM6kjFrQmtTPPlgOUeI2FNeKVCpCLjCGNtHaahQZZO7ew4CNpi8O1ipYedxxnmgA0LTk6FSoUmCdQWogrRIVVgHeA0YL0KC4NhhBc3pbpDVThXidJ3zc4xM1NuQZ8sN5p/GZN57hM6+/hiBUkLuHFfu+Y90DkmzAtVUYbySHoXIh0l3030XEnNYbA6TURtF6najdLzby3Jlh0XnG7HYvSM8FTqxDhN5/3gvuKoFcWtvR0HCCr66anMcFfe/oW18gr/5Ornh095n+fgFcwcHqGiV9aUJxjXBcmp2Pg558P/Tn5d95BCddve4wBMC4f64+Q5OJCa42i76wKvnMV3Q94PGLyE3P53r8fT5+v9//c/L7yygNslB0q0qDHrhYRDh//ZylsKjUipt3rEnMDdrgz85W4603X8cX3v4U3v7Mp/Ds6RM4Z/Haa6/jjU99Cvu+4d1332UjK7bOtO1VSKXCeY+UE5y8D4BNvrUOx8MBPUy1vyttCBVbxXwFJps3hog1Xj/MlmCzsswHcRKzCCEiS/E8z8sQ3C7LMorw4+GE83lFazTpcF4S0htd1pzpacH83m9OJ2ij4WcvaeZN9CgsOvZ9BxodEaHVsHBWCnxMZkp2TURqSsqYZ3/JytENt3f3DLyLtL41ziCLk1vPm+G9H8d1zEybaWw+PZumlCRoz6WgyZnhXrVcQjGXZRmIMMX1GqWk4boXc8I8zfB+wv39nUzbeGmv6w5S0Lg8BtEkzfM8nM26EcM8T2M40gNGraTD05SgwFmHViv2PeB0OiCEyOvYarRc4Z1k42iD1gy2jS5zNGeg3qgj3pP3MFYjBFKl5nnGtm0IIcp6xw391fHxj594covJkz9uJozpMhRQl0y6SL3n2qJp/VzmIs51FUUyTOIxow+lqNVRSLkwob4W0egopLlhj2dgNsi14dPLCdtnZtw9ZHzjnVvcrzv2AGyxwJWKXIGYKqxq1JVWIKaGqoFqiPQwAFShmoZWwOASmdoXETk3sOGpsm53G33a7lbAsMEhZU0GRamiqIis1AiEZohtn3iL41jrjZRMntFp4SAKJANBedDICQIuFr/Mo+no+WVAOMT7WtCLBkErqoiiJdHlaqp9KRj7PoWxzfLjq0fFYm9ImjQfXR8CmbyPohidYXBFb5N/N6UGNUk1OnChge5zvanpg5debGt11cDICHC4s8k++6iRkNftjmytI++X/b022tBrMEek+x5obeQ5+N1o0YpcTlEbuhetLyjPaM7Qz10vii+NnVK9Qb5oaa+1MT1Qtorg/eKWe3FE7TX/aBsEYePZ8WN/7M1nHUgLBo7U6J5BBEGarVb5HXZ6FgscYlk9b0YKFbRGDTkt1jsK08bArB99cNYbA77xIuYyapxXNMokMM4dxvXY/V2UEn1ZY5Hf8wBr6XUWmxWtFOZlpt5WrictWpcmw0JIjasNr48q9DCl6KzaIHplGbj1+sRZh56ZxXgHA93okGatRlMdKQZybYAi4yLlihDKGBzmukMbOrgiZjE2ciigMQANv4DQGkIDJuthjUatSe7tCoOMxbIvKFoGqzlB14+/r33CxkZhkgC8/oWXUjFpCq0+8/oNpukNlFKwiSXd3f0DQoy4ezjDenaM1lh4Y2S6S2/yXhw4Z9FAAWZPje8c+N6oQCwws7yP6//3jIzrSVBvcq4bFC4wVwhPq0Az40IdnX6/Wa4u8pfRlOvnVaqLy/qyLdMI+e8u5nzEZB2NRMNjaP26Xejv5/s3PC8jPN+L+qDfv/2Vxz+vO2T+sV0979i3xueTOxQvozWXINFO7+MTjE91PRWScyvzi0fP86jp6dMzyGMbp5k5J3nPnfbIa0e1bsvJBSvLIqSUwnH2+PQbz/DWp17H597+DD71+lPMs8M0Tdi2wOmBMQgx4bvvvMfmYjkQfWni1iIN9PF4RAikCd3f38NqAz9P8POEnBKSBGDGGKWwl5tX8TM8rBFPnhxR5do34ttuLTNgHh7OgnQYtKYwTYs0AxNiDFz0NekYk59Y3ELDOAtnPFKOqLlgmifc3r2ANQ5KivVaKoo0KCkItao1UuNEa6KgcDodsK2BTk2R1793DFMLon9Z1w0KtGqk7qWOontdNxgzSfhhGwFxWmtoU2Esp05KKcSYcTwuw3GtlCo0VQWlLOZ5EpQFOJ/XsRj3PJxLllbBuq6DYsaBh9AENJ3LmroYVFhrcH5YoY3BNLnhhhZDIK+6VtEcsajtG3bX2uTMvJ9t3cf6aLQRAT+NAPp7640RBzT2cq9drVHGGMRYeU4VdR3WdGOVjLQV2biVIGikBlRdJEdDjfX01fEnH4uNmK3lRh0jrXJLYdFTClJlY2uNRWuWK3fLKDkBrcJqB6UgCfZ6JMiXlqEyU++VIZ1GQcO7hmmySFUjpAxngZuTxtObgs9+6hlibVhDxnkvOK8BD+uO23PE/UpdTkwFMXOqmkpBLiw2xpIpxRG
RHNJOa6topSGVgpJlCi0mFVaKptKoISijXm3DsZKTaPmpOAxe1nJ1KfQFfZBx99U+2qfoSgpM7vWdjqOlMOfAio1PbQ3WUJysxfIcitSZKnSbUorUigxzrKUgy4Cyi7mtsdLANYkZaEIVVKOI1VJYKqM4cdfUKQw2iLhDGm1GIGaISRgCdNEcou6GYTtdCyfeTbG4d5brQKlVilUOzLIMRLQ2iLIuK1QOjQoRJgYcttEwUfNBPl8fXDpnx4C2xyMoTSOLUoug/9JgKdZiOWeREJRRbojVgtDA07AjV5aa0VwKlnkme0YslqHapZAeqIwMhJWCNgYpRaFRQnoMLbrTCGuoSYS61GBA/27UGOByD+HvKcP/rk0Ny2eMa+7SkNRGeifXcgugCY2aFPJ+3ZZc5LWV1BlF9K8dSZF6skjj1yB6EYhDIeS96tFv9ma0DxTZTPd7QmqhXK5qHBlOSG2ojcY8eRwO8xDhc18zQ0dqrRMXU0j2HGls/XvQhq6r3L+JUGmtoOScO0dKtHMO1hix/QYa6ogRMNbCyTXrrYc2GjHS4r7fF4fjAaqfi9ZgZY/TGljmhRIzoa+rWjHPExQKjPFwzmNZZpo1gZ9bGwOr+Noh/TtCbFTrVJ+LSxLpHwqQYi/EgNYqZmsxW4/T4Q3kkmHMW9jWQA65Ah7uV+yJVJCqmSaecoXzLAisLGYNGGLtGKNcYDxC7LkWvAjDHmCNUHhad0Vqo9npR0dvOnXkMSJz+fvRdaM3Nu3R7/dmi9diF7c1AGbclJe/641ORzyuphkAeaKPEIdrXuWF3nNpHD7iGBMm9fhn12jIo36oXf2Q77dPui4ozmVB6Z9lLDD97+X9j7+XTex6WDY4vv2zXTU2vdjnOtS+p/kZw7iPaCQv5xdSOBCh8dZgmT1ujgvefOMp3nzzddwcD3hyc8KTJzcMPGsNk/f0sU8Fh8ORRhViC6xbhW5A01pCqTSg1aAddTQghIBlWWC1gZs8zutKAbdM9QHy6rugm+uqgnVMquZQhk5xfhI0skbMs0OtvNeOpwX7FtADIZVmsJo2BjEXtJLgrJWCg1Mo7+g4tm8Bh+koCei8vmtmwT9NHnsII0jz4eEBzlGvsa4r8zL2gGmaR94NTUCIFHTbaq2ZBwKoR8hpz6JxziIlUsliipj9LNz8bhpSYa0eKMj93QPmZRZNlcO6BqxrAKRRuTkdmbRdKx4edkEyjNzrVRqlOPRG/Tqm8xodmQ6H42h4jHUwptMeGOBqjUGIAfM8Dz1EEYvtlBIOh8PVAAHcAJQSPQ3vZe8nrOsKgA5nRN7690jDgD4Q6RMza1g855LJZbZG6AwFJbNpLJLQrhSvv8swkM2e9584e/lP7XF3v6Mead/e3eesUSi5osp6q41FqkBJEd5bzBNpoFBAaQ7KLEAOiIXZIxENVVlkNEAbxH2HPdqxl1nrsJ0Dsi3QMFDWwlaNuAU41fCGc3iSK2KsWEPC84cNUZyRkjQ1NM7IYkgQ0PUed3cPvHaswbrtAJQES1Kv1hqd0XYpzGtt8rwZxTSY0mTfpDub0QBaha4NzioozUn7ngpK63vaZX8aBVq3YL7S0Rij4CcamjhrR+q80bSQPZ6Y/WOdAxQHScyeIrXFWSsuk0qQWxb8DGy2l/pg7M0Ns/c0HpHagI1RhZ8mGaw1QUu73bTsauIyB6XoMikNRG0VTTU4bRHDjnmhgyK0OMZKwY5SoBvv085qmaYjRoNnjdzj8r68Q9gCw3q1pk22AkaWjmYQrxUTki3E0bT5ibrBSdwjJXWHLlmq8XHOQynLXJJGY6fJW8S4D40Jqb50prPWMQcpR1hDDZWfJmnKyIkk64X7fC5cn7x1wswhhWyaHLZ1g7YOOSd4Z5ltliMA0vSc83DWIaUoyHxGisziKgBiyDgcDnIOPWKKzEZRvA6CDHhoNsSGvUlg7uQ99/V8GSAWee9NBuLUfhYJ4yXSGMTyPecyapE9MiMqxohtWzlMLBUxFeRuv15BCmqvpyColGIThCYUtk4l7PXkGH9DkBKDeXLwfoJzHMBZa7HMM7PtnBWKI1kbXeumpeFn2LaX55JcNGtgwGELwGbOeTYpWujSGgyltd2xVkxrrOPg58npyH010kTleDohp8TBKRRazpgOE/YQMU8e3jns+0ZnVMNBptYyXGgX/ZH3DpP32DcaDNVWBpCRa0GK/440Ngw3AlrOcNbSEaY16lWk8QCUODCQmpKlcLVK4dmTAw4zi543nhyxxwQYjTUE7FuQBZaag5iKCCNpBUeBs5PpJhdWa4VLrBpKzpiEl0mOIC9QoCFnfomXwL/HKM7L/7+mllyKZj7XpZgGHqESV1Bo5422ltEnVRdTgjoaltqzf9RlEtDfHxGdC2T6GKFRV68ndpJKXVwjBWLpQ4sxF+gvd4XiyDuBdDMX/OaqmSEa0p3L1HhOXqDUW1ysIrtosmsLZEJxtfl1bLy79mh5rdG8dIhovA/+0SgNVSWrRZpqoyn+naYJx8OM4/GAJzcHfOqNp5isGY5RudZBb6RGh9eMdxODvmLCJvSepvRIqT7NC5SmJWJDY3K15Y3OxONyyWRpFXnbWAhLEdwF8ykllMzFOcY4smQoImczXxqL1ybIpLZ6WCYrVZFzYfCWTKB4TRk445Bb5uSoNk5PwOTrKthvaRXQpKaVAGzhAd45KAuBkTsETYpfrYWoTqZtO78+LtTrtmJubNJYKNGprYeLpkTHt5yLfFaNZZnQBBFd5oPQSDN0uyCrfWDCTZ9W0Pf3DDDz3gKNjmd9oqsUMM9u0PQ2Ee/3MMwu6O524QDk8RrTNKOIUYlSGs4pef2eDWA4ZVQGChrGOgYfKqJnzrGYsdZI9k4VChqbFaUU5nlhg2TtQJV6sThN3IB7CGi3GuZ9wO92ElF7lmupyoQr5Yv1bK2FzblRYoYgw4OPP9z6U3/cPmyClEvTXzO62xbRCNnkuSIj7NyQoTWa0mh6BnRF6takbkIGm9jUAKgZyR2AwxHGOoSYYbSF0jtsq/CThVYGD+cIN1ErphQwaY3j8YB9S3h2d48iU9qcM2AssqwZJVc8f3GLmgJa05gPN0xgLxkhBqCS6uKdR9wDloNnsO79jhjiMDZ5770PcXf/gIfzhj0mlFzQUBDDhpsS8Td+3OPP/9wXcXr2WbzzwRkf3EX8k1/7Kn73D99DqwZ+mam7qxXeT9IkKHhn4a2FdUaKF4PJU/fnrIN1Fs47aK24TlQ2IafjCT3oG1Bw3uIwz9gjdXq1Fszew3oOaGqhaYd3nWJVYbXUBEph8RPRqtZA+jvvMUDhdDxRN2cNqgSbxhCwbTumecFhnqAM3eicc9Big5tihBGtJQMNLVIOMKrC3b8L9fB1vPHFz6Iqg4d1x+HpZ/HibBDqLE1guxSi1sIbj/N5hVFgk9GIynHiTiaB0azDagWU0Sg1AyjwxkKB03a64NGVb1mOCIn5Y1ZfMTg06UohBChNTZCz1MvUkrEsEyUEkQO0CsWGx1n4aUKV9XMNO4whJRPNwGkW0TEGHA9HhJwRwi6hzUBrCg
/rGc46nI4HngcxJkCtSLnK95mRxOCHUQbMTFNoeDhvrEvA6uWwHBDEMCWL3jIX7omTt3CGbl65AM5o5JSgeh4NAO8s1m0FJBIhpYiQK/Zth3cWIQZSsbSCsR5aadzdn4e74b4lhBCw7zs67buWgvO2E/2qHFIRYVRjEFFLQUHF4UCKMoM12XAaQyMQbx0Oh4msjJxwenIcw4LjPMNZg1YA4xxztxRkAJBQS0ZrCjeHA/awowG4ORzgnQWUwuS5f0ahvne0Jsc8agfrnWTKFdFwMngzS/RFH7bnHOHshFaBkBOcNQzZNJbDzGlGDAnWGWjDvdxqI2YHBrlWHI8HTC5h8hallZG/s2070idgWH+yxkZpCa9TqEkS5QXyVQC8IhWlNuZoAIAtibkaTjOtOHEikXPBNFloazA5g3acedELjSWlhIdtwx4zSmnYYhpNTxMhYqd65JIBa8RAgPxeW8wolCo4SQo7EaImi2LXHlBoCYEYhRvcLrbRveh6jBA8Rg0u/21ElIfR9PBX9VUz1BGbdvWzC1XuogUSYX63uuy9TYf25T038cxXpsP93HcHAiKo0wV16Q3L+GYffc8fpe8hnKlwOh4xCUezyPlTvUm5QpuUEqcLQZCUUrIYXywSlVZwfeomIkdnOa3uTmXWOszew08Gx2XmjewtvHMSaMlxkbZGUsALtOGkgqnb/HzWOaEWKGzbhmWeUJVCqhkxRljnsPiZKItMXp7c3EhGE6kepBEZaG3H93ihbTF9e5omNkxCgwo7dR2H4yxubnE0WCxUPSl1Cpidg7ULkUnZmI0xOJ9XWGtxOC4iSjcoRcF5hjmWmqElybeB7y2mAGM8pnnG/cM9jHVwzgyNyfF4kE2GCAjQEAItoXNKLBA8baytVUiJ58l7L0GZFqyeiVB2NMNaC2MM1nXF4TjjeFywbaSyGcOhxvm8juaja+AASINAqpYxGtu2CS1MiU20gzbkVhfRA/VGylqNaTqgNQZXBrFHpsFElcaGOTZsbgyMtVjPG0iH0eJ0xuZjElcsP02kKMQLSgfJ3DH2MiFuQrew1o3334Nms9gH94ECLZ+LhIN6dI4/c344pZymGTEGpBTHeePa2gvwKg2pUGQLp86lZLE0D9+7gL86PvKYl2kMU6yzULVTchtCrIi54Bx2zN6NBrW2CgO6LubwgAYFZxRm3ZDjhgkKLTZOo/0BvgG6eljvcTAGKBrR0BDAFAN3eIqSGp5OB+TcEHJEygkfvPcOXnx4j1oyrPfYasGTZ8/w3Xe+jVgyPv3mp/GlL/0+vvOd9/Dw4hafe/uzON7coKFh3XciF6Xg+fNbWtPPDn/hp34YP/rFT+E3f+d9fPe976IzZLwGns0Kz+YjvDM4TAZvvfU6fvrP/hi2b30Vb83v4NM/8hmcI/DEOvzIW0d88TM3+Natw3feCTDW4tNvnPBDP/RFzH7GHjPWLeDZ0xNaY/2wbRtubjxUZfNgjaHWrHLq7Sba4mutpNFnoVpqowWsZuFtxTgmZVKCa2YRVwr3y31fMTlN2hRIRVWtD40Mkc4YscxsIr3LiJMCQKttNMCcJpRq4f1EdKJKbphVKI1W7cdZo6kIbbXQDQOUqQh37+O7X/qXeHoEXmDD87tbvP7ma8jxOfYz8MbbPwWYBTVneGeRS4JTAa3e4tlJNIQqiVFbw3beoGKByxFPn7zGXI89oukG59icNSRY45FSQU4B83HCtgdolZEakR7vLRoybeUrkLOGsmQItNZgDYBaoC0w2YSQMvzTGTHuKCnDzWLsAItiNJxScOkW4WHHzTLBW4vJWcTcUBcFqzdU36C8glb70EJjaqgtwpp9ZLvlUlFVg1sWKPWAohSKalgWD4MdJWxAVci54vWZdGKtum7pHm1pqLkAE5kYaBxEauNkPS58fqPpmdAAbQ3IwNOwTw2MbShpRTMFMArxEDFZh6Y0ShVRvo5QqiE8yVDaoRQHYIFWCinsklfHYOiQCtw8A7WI0QVrqlSJ/AOMUUFT2BNz4axyQo/jjem0hobsP2GXPb9epR9BaHgV8zTJnxtqm5ETB2LeNgCTAAEZtfB+tWYHWoHyCiHRAEiZBn0wwzXUKA1THlDRUMD6qgEwpkG1M7qrRNMVNZHWmkqEVQa6KZhmoFBhskUsAaryvE+ZQ2rnLfP+rEJ4eB83MqytDWiVaLNFxVrix17TVXs8uv/I4+7uDk+fPsX//n/9v8TxcGRj0BpzPDT9vYtYx3UjAOc8O3jgKh8iyabehI4hE2SZrHo/oaMiWlOoHVOSdFc7BMMpJoQQEYRvn0vBtu0oBQz/ihGxMHldK1rIMQiMiJBWwrlsWSDfjJKy8JIJE+Zy6SQump2OtkgDdYX8DFOCWgXx6eSQSyN0OdV87t44XRw6Lk3S9zMNGDlgV19b6/+8/ibl4XQmefx8rSMnVw94uYnhv8cPrpAc3ojWaMzzjNNhxuk44XhcMHkPVS8uHr2JnCcvqandKYswaeekA4B3nET62bMpEbefeT5g31dx6FFYlgNCjJgnmQiO5jGjFsAKjN0bi55yT8pFgTWcrp2OByb/tsbiUs6LVsLhFb1WA513up6r675CCMPEogEC9dZBRTqfafGrlYGxFwSzB+4B5LjWQuczP5mB7HjvEfYMrRlMq7VBroXJybUiFzpxoVGz0lGjGBPmuVNkGvY1wlgzrsdSGH6ZMh1XQoyYvJPmijoSokIepSSs6z5QCN6Thu5pSuF4XOScVgnknJjgLhbJpI+dMU0zrHXSrJHfr40WPrkaznadIqNUQwgRp9OJCKJcps4JF1xbrOtZmgczGqOufVmWA0IIPIeCzvR7uCO2pLbwugl7xOnmRmg6SYyGLo+pjRNaBWA5zDg/bGy0UkZVEOH/RVPTUUc/2TFY6c1MR7VpcT3BGIvDYcG6roLaUZDOdaEbrHTqkh3nly/BazGljFKyoNddvFxxXjf87/4P/zlub2/x5MmT71lLXh2Xfe1f/ef/KZaJKCyprIq2pQ0oqYusG5qghEaRQtqHNiVnmVxeLHtbzsilCX2J9KaGRv2NBtAaiqyHaBaxWSRFSta6bbB+QlMKYY9IOcNpg9pkmKVIsVLawnkPKIW4B+Z5iR6MiG1GU0ZE/4B1GpPRmKyC0g53531kYTUQpKKQWmNyDt5IuJ9SULUALaG1glzls0OhFIOqJ+wZcNZg8eqSZwfIGgxkGJTGafA8abSaUAvXTq5pQIWBNQpTR8RbgSoFW6xDC7fvzI/yjoHAnTrECXQV8XVDk4I3ZoPcLCYDHD2gkEcCfW56DAlZ1LPa7UYsStD9mDK0uIY552hTy/IHWtzktHw/SpFedjQJNQUJFzSoLWOaDF+7GcB4ZAm8rK1yPxFNAT8XTVIYhGigweESNHUQtTTEnFC1wsEZKNG30NWqIBUlZjtENYwCh8Kg26K1FrloumEZB+c9Ws0oaYN3jEpQSpHuK0PIpvleqRkyyBWwCrCqwCiQ/t+4vlZlkGWgtO8rdINoQAqsIHnazUg5cYjQKtA0Sk0DXa9NI1YASuGJNyiZ6L3Tj
... (remainder of base64-encoded PNG output from the plot_images cell elided) ...", +
"text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from llama_index.core.response.notebook_utils import display_source_node\n", + "\n", + "for text_node in response.metadata[\"text_nodes\"]:\n", + " display_source_node(text_node, source_length=200)\n", + "plot_images(\n", + " [n.metadata[\"file_path\"] for n in response.metadata[\"image_nodes\"]]\n", + ")" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-8/Advanced_RAG_with_LlamaParse.ipynb b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-8/Advanced_RAG_with_LlamaParse.ipynb new file mode 100644 index 0000000000000..f10bbf59e7e0e --- /dev/null +++ b/docs/docs/examples/cookbooks/oreilly_course_cookbooks/Module-8/Advanced_RAG_with_LlamaParse.ipynb @@ -0,0 +1,1097 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Building Advanced RAG With LlamaParse\n", + "\n", + "In this notebook we will demonstrate the following:\n", + "\n", + "1. Using LlamaParse.\n", + "2. Using Recursive Retrieval with LlamaParse to query tables/text within a document hierarchically.\n", + "\n", + "[LlamaParse Documentation](https://github.com/run-llama/llama_parse/)" + ] + }, +
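{ + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As a quick orientation, here is a minimal sketch of the LlamaParse call this notebook builds on. It is a hedged illustration only: `./sample.pdf` and the `LLAMA_CLOUD_API_KEY` environment variable are placeholders, not artifacts shipped with this notebook." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Hedged sketch: parse a PDF into markdown-formatted Document objects.\n", + "# Assumes LLAMA_CLOUD_API_KEY is set and ./sample.pdf is a placeholder path.\n", + "from llama_parse import LlamaParse\n", + "\n", + "parser = LlamaParse(result_type=\"markdown\")  # \"markdown\" keeps table structure for hierarchical querying\n", + "documents = parser.load_data(\"./sample.pdf\")\n", + "print(documents[0].text[:200])\n" + ] + }, +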
{ + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting llama-index\n", + " Downloading llama_index-0.11.5-py3-none-any.whl.metadata (11 kB)\n", + " ... (per-package dependency resolution and download-progress output elided) ...\n", + "Successfully installed SQLAlchemy-2.0.34 aiohappyeyeballs-2.4.0 aiohttp-3.10.5 aiosignal-1.3.1 annotated-types-0.7.0 async-timeout-4.0.3 beautifulsoup4-4.12.3 click-8.1.7 dataclasses-json-0.6.7 deprecated-1.2.14 dirtyjson-1.0.8 frozenlist-1.4.1 fsspec-2024.9.0 greenlet-3.0.3 h11-0.14.0 httpcore-1.0.5 httpx-0.27.2 jiter-0.5.0 joblib-1.4.2 llama-cloud-0.0.15 llama-index-0.11.5 llama-index-agent-openai-0.3.0 llama-index-cli-0.3.0 llama-index-core-0.11.5 llama-index-embeddings-openai-0.2.4 llama-index-indices-managed-llama-cloud-0.3.0 llama-index-legacy-0.9.48.post3 llama-index-llms-openai-0.2.2 llama-index-multi-modal-llms-openai-0.2.0 llama-index-program-openai-0.2.0 llama-index-question-gen-openai-0.2.0 llama-index-readers-file-0.2.0 llama-index-readers-llama-parse-0.3.0 llama-parse-0.5.2 marshmallow-3.22.0 multidict-6.0.5 mypy-extensions-1.0.0 nltk-3.9.1 openai-1.43.0 pandas-2.2.2 pydantic-2.8.2 pydantic-core-2.20.1 pypdf-4.3.1 pytz-2024.1 regex-2024.7.24 striprtf-0.0.26 tenacity-8.5.0 tiktoken-0.7.0 tqdm-4.66.5 typing-extensions-4.12.2 typing-inspect-0.9.0 tzdata-2024.1 wrapt-1.16.0 yarl-1.9.11\n", + "Collecting llama-index-postprocessor-flag-embedding-reranker\n", + " Downloading llama_index_postprocessor_flag_embedding_reranker-0.2.0-py3-none-any.whl.metadata (714 bytes)\n", + " ... (requirements already satisfied; output elided) ...\n", + "Installing collected packages: llama-index-postprocessor-flag-embedding-reranker\n", + "Successfully installed llama-index-postprocessor-flag-embedding-reranker-0.2.0\n", + "Collecting git+https://github.com/FlagOpen/FlagEmbedding.git\n", + " Cloning https://github.com/FlagOpen/FlagEmbedding.git to /tmp/pip-req-build-g7g78sb6\n", + " Resolved https://github.com/FlagOpen/FlagEmbedding.git to commit ddad0f9cb9a46be41fdb5d9cde47cfedf2e43241\n", + " ... (dependency resolution output elided) ...\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from 
torch>=1.6.0->FlagEmbedding==1.2.11) (4.12.2)\n", + "Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.6.0->FlagEmbedding==1.2.11) (1.12)\n", + "Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.6.0->FlagEmbedding==1.2.11) (3.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.6.0->FlagEmbedding==1.2.11) (3.1.2)\n", + "Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.6.0->FlagEmbedding==1.2.11) (2024.9.0)\n", + "Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.6.0->FlagEmbedding==1.2.11) (2.1.0)\n", + "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers>=4.33.0->FlagEmbedding==1.2.11) (2024.7.24)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers>=4.33.0->FlagEmbedding==1.2.11) (2.31.0)\n", + "Collecting tokenizers<0.20,>=0.19 (from transformers>=4.33.0->FlagEmbedding==1.2.11)\n", + " Downloading tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.7 kB)\n", + "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers>=4.33.0->FlagEmbedding==1.2.11) (4.66.5)\n", + "Collecting pyarrow>=15.0.0 (from datasets->FlagEmbedding==1.2.11)\n", + " Downloading pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl.metadata (3.3 kB)\n", + "Collecting dill<0.3.9,>=0.3.0 (from datasets->FlagEmbedding==1.2.11)\n", + " Downloading dill-0.3.8-py3-none-any.whl.metadata (10 kB)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from datasets->FlagEmbedding==1.2.11) (2.2.2)\n", + "Collecting requests (from transformers>=4.33.0->FlagEmbedding==1.2.11)\n", + " Downloading requests-2.32.3-py3-none-any.whl.metadata (4.6 kB)\n", + "Collecting xxhash (from datasets->FlagEmbedding==1.2.11)\n", + " Downloading xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n", + "Collecting multiprocess (from datasets->FlagEmbedding==1.2.11)\n", + " Downloading multiprocess-0.70.16-py310-none-any.whl.metadata (7.2 kB)\n", + "Collecting fsspec (from torch>=1.6.0->FlagEmbedding==1.2.11)\n", + " Downloading fsspec-2024.6.1-py3-none-any.whl.metadata (11 kB)\n", + "Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets->FlagEmbedding==1.2.11) (3.10.5)\n", + "Collecting scikit-learn (from sentence_transformers->FlagEmbedding==1.2.11)\n", + " Downloading scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n", + "Collecting scipy (from sentence_transformers->FlagEmbedding==1.2.11)\n", + " Downloading scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (60 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m60.8/60.8 kB\u001b[0m \u001b[31m29.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: Pillow in /usr/local/lib/python3.10/dist-packages (from sentence_transformers->FlagEmbedding==1.2.11) (9.3.0)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (2.4.0)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in 
/usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (1.3.1)\n", + "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (23.1.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (1.4.1)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (6.0.5)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (1.9.11)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->FlagEmbedding==1.2.11) (4.0.3)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.33.0->FlagEmbedding==1.2.11) (2.1.1)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.33.0->FlagEmbedding==1.2.11) (3.4)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.33.0->FlagEmbedding==1.2.11) (1.26.13)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers>=4.33.0->FlagEmbedding==1.2.11) (2022.12.7)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.6.0->FlagEmbedding==1.2.11) (2.1.2)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->FlagEmbedding==1.2.11) (2.8.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->FlagEmbedding==1.2.11) (2024.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->FlagEmbedding==1.2.11) (2024.1)\n", + "Requirement already satisfied: joblib>=1.2.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->sentence_transformers->FlagEmbedding==1.2.11) (1.4.2)\n", + "Collecting threadpoolctl>=3.1.0 (from scikit-learn->sentence_transformers->FlagEmbedding==1.2.11)\n", + " Downloading threadpoolctl-3.5.0-py3-none-any.whl.metadata (13 kB)\n", + "Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.6.0->FlagEmbedding==1.2.11) (1.3.0)\n", + "Requirement already satisfied: six>=1.5 in /usr/lib/python3/dist-packages (from python-dateutil>=2.8.2->pandas->datasets->FlagEmbedding==1.2.11) (1.16.0)\n", + "Downloading accelerate-0.34.0-py3-none-any.whl (324 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m324.3/324.3 kB\u001b[0m \u001b[31m47.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading transformers-4.44.2-py3-none-any.whl (9.5 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m9.5/9.5 MB\u001b[0m \u001b[31m98.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0mta \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hDownloading datasets-2.21.0-py3-none-any.whl (527 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m527.3/527.3 kB\u001b[0m \u001b[31m93.9 MB/s\u001b[0m eta 
\u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading peft-0.12.0-py3-none-any.whl (296 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m296.4/296.4 kB\u001b[0m \u001b[31m57.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading sentence_transformers-3.0.1-py3-none-any.whl (227 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m227.1/227.1 kB\u001b[0m \u001b[31m49.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading dill-0.3.8-py3-none-any.whl (116 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m31.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading fsspec-2024.6.1-py3-none-any.whl (177 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m177.6/177.6 kB\u001b[0m \u001b[31m43.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading huggingface_hub-0.24.6-py3-none-any.whl (417 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m417.5/417.5 kB\u001b[0m \u001b[31m84.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl (39.9 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m39.9/39.9 MB\u001b[0m \u001b[31m78.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hDownloading requests-2.32.3-py3-none-any.whl (64 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.9/64.9 kB\u001b[0m \u001b[31m22.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading safetensors-0.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (435 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m435.5/435.5 kB\u001b[0m \u001b[31m78.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.6 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.6/3.6 MB\u001b[0m \u001b[31m97.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m\n", + "\u001b[?25hDownloading multiprocess-0.70.16-py310-none-any.whl (134 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m53.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (13.4 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m13.4/13.4 MB\u001b[0m \u001b[31m94.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0mta \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hDownloading scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (41.2 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.2/41.2 MB\u001b[0m \u001b[31m74.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hDownloading xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m57.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + 
"\u001b[?25hDownloading threadpoolctl-3.5.0-py3-none-any.whl (18 kB)\n", + "Building wheels for collected packages: FlagEmbedding\n", + " Building wheel for FlagEmbedding (setup.py) ... \u001b[?25ldone\n", + "\u001b[?25h Created wheel for FlagEmbedding: filename=FlagEmbedding-1.2.11-py3-none-any.whl size=1532999 sha256=e8fdb11999cd20961dbf8a2cdca1f2a8e9daae60c7ace5a2181e529e49f69801\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-0w5j143m/wheels/41/cf/a5/5dee96ed64e5aaffe5aa3d583828258fdefed9a305db6e7f48\n", + "Successfully built FlagEmbedding\n", + "Installing collected packages: xxhash, threadpoolctl, scipy, safetensors, requests, pyarrow, fsspec, dill, scikit-learn, multiprocess, huggingface-hub, tokenizers, accelerate, transformers, datasets, sentence_transformers, peft, FlagEmbedding\n", + " Attempting uninstall: requests\n", + " Found existing installation: requests 2.31.0\n", + " Uninstalling requests-2.31.0:\n", + " Successfully uninstalled requests-2.31.0\n", + " Attempting uninstall: fsspec\n", + " Found existing installation: fsspec 2024.9.0\n", + " Uninstalling fsspec-2024.9.0:\n", + " Successfully uninstalled fsspec-2024.9.0\n", + "Successfully installed FlagEmbedding-1.2.11 accelerate-0.34.0 datasets-2.21.0 dill-0.3.8 fsspec-2024.6.1 huggingface-hub-0.24.6 multiprocess-0.70.16 peft-0.12.0 pyarrow-17.0.0 requests-2.32.3 safetensors-0.4.4 scikit-learn-1.5.1 scipy-1.14.1 sentence_transformers-3.0.1 threadpoolctl-3.5.0 tokenizers-0.19.1 transformers-4.44.2 xxhash-3.5.0\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n", + "\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython -m pip install --upgrade pip\u001b[0m\n", + "Requirement already satisfied: llama-parse in /usr/local/lib/python3.10/dist-packages (0.5.2)\n", + "Requirement already satisfied: llama-index-core>=0.11.0 in /usr/local/lib/python3.10/dist-packages (from llama-parse) (0.11.5)\n", + "Requirement already satisfied: PyYAML>=6.0.1 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (6.0.1)\n", + "Requirement already satisfied: SQLAlchemy>=1.4.49 in /usr/local/lib/python3.10/dist-packages (from SQLAlchemy[asyncio]>=1.4.49->llama-index-core>=0.11.0->llama-parse) (2.0.34)\n", + "Requirement already satisfied: aiohttp<4.0.0,>=3.8.6 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (3.10.5)\n", + "Requirement already satisfied: dataclasses-json in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (0.6.7)\n", + "Requirement already satisfied: deprecated>=1.2.9.3 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (1.2.14)\n", + "Requirement already satisfied: dirtyjson<2.0.0,>=1.0.8 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (1.0.8)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (2024.6.1)\n", + "Requirement already 
satisfied: httpx in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (0.27.2)\n", + "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (1.5.8)\n", + "Requirement already satisfied: networkx>=3.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (3.0)\n", + "Requirement already satisfied: nltk>3.8.1 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (3.9.1)\n", + "Requirement already satisfied: numpy<2.0.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (1.24.1)\n", + "Requirement already satisfied: pillow>=9.0.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (9.3.0)\n", + "Requirement already satisfied: pydantic<3.0.0,>=2.7.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (2.8.2)\n", + "Requirement already satisfied: requests>=2.31.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (2.32.3)\n", + "Requirement already satisfied: tenacity!=8.4.0,<9.0.0,>=8.2.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (8.5.0)\n", + "Requirement already satisfied: tiktoken>=0.3.3 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (0.7.0)\n", + "Requirement already satisfied: tqdm<5.0.0,>=4.66.1 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (4.66.5)\n", + "Requirement already satisfied: typing-extensions>=4.5.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (4.12.2)\n", + "Requirement already satisfied: typing-inspect>=0.8.0 in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (0.9.0)\n", + "Requirement already satisfied: wrapt in /usr/local/lib/python3.10/dist-packages (from llama-index-core>=0.11.0->llama-parse) (1.16.0)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (2.4.0)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (1.3.1)\n", + "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (23.1.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (1.4.1)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (6.0.5)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (1.9.11)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.11.0->llama-parse) (4.0.3)\n", + "Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk>3.8.1->llama-index-core>=0.11.0->llama-parse) (8.1.7)\n", + "Requirement already satisfied: joblib in 
/usr/local/lib/python3.10/dist-packages (from nltk>3.8.1->llama-index-core>=0.11.0->llama-parse) (1.4.2)\n", + "Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk>3.8.1->llama-index-core>=0.11.0->llama-parse) (2024.7.24)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from pydantic<3.0.0,>=2.7.0->llama-index-core>=0.11.0->llama-parse) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.20.1 in /usr/local/lib/python3.10/dist-packages (from pydantic<3.0.0,>=2.7.0->llama-index-core>=0.11.0->llama-parse) (2.20.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.31.0->llama-index-core>=0.11.0->llama-parse) (2.1.1)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.31.0->llama-index-core>=0.11.0->llama-parse) (3.4)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.31.0->llama-index-core>=0.11.0->llama-parse) (1.26.13)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.31.0->llama-index-core>=0.11.0->llama-parse) (2022.12.7)\n", + "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.10/dist-packages (from SQLAlchemy>=1.4.49->SQLAlchemy[asyncio]>=1.4.49->llama-index-core>=0.11.0->llama-parse) (3.0.3)\n", + "Requirement already satisfied: mypy-extensions>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from typing-inspect>=0.8.0->llama-index-core>=0.11.0->llama-parse) (1.0.0)\n", + "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /usr/local/lib/python3.10/dist-packages (from dataclasses-json->llama-index-core>=0.11.0->llama-parse) (3.22.0)\n", + "Requirement already satisfied: anyio in /usr/local/lib/python3.10/dist-packages (from httpx->llama-index-core>=0.11.0->llama-parse) (4.0.0)\n", + "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.10/dist-packages (from httpx->llama-index-core>=0.11.0->llama-parse) (1.0.5)\n", + "Requirement already satisfied: sniffio in /usr/local/lib/python3.10/dist-packages (from httpx->llama-index-core>=0.11.0->llama-parse) (1.3.0)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.10/dist-packages (from httpcore==1.*->httpx->llama-index-core>=0.11.0->llama-parse) (0.14.0)\n", + "Requirement already satisfied: packaging>=17.0 in /usr/local/lib/python3.10/dist-packages (from marshmallow<4.0.0,>=3.18.0->dataclasses-json->llama-index-core>=0.11.0->llama-parse) (23.2)\n", + "Requirement already satisfied: exceptiongroup>=1.0.2 in /usr/local/lib/python3.10/dist-packages (from anyio->httpx->llama-index-core>=0.11.0->llama-parse) (1.1.3)\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. 
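{ "cell_type": "markdown", "metadata": {}, "source": [ + "(Optional) The install above resolves package versions at run time. If you want this notebook to stay reproducible, you can pin the versions the resolver picked in the logs above — a sketch based on that log output, not a requirement:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# Optional: pin the versions resolved by the unpinned install above\n", + "!pip install llama-index-core==0.11.5 llama-index-postprocessor-flag-embedding-reranker==0.2.0 llama-parse==0.5.2\n", + "!pip install git+https://github.com/FlagOpen/FlagEmbedding.git@ddad0f9cb9a46be41fdb5d9cde47cfedf2e43241" ] },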
{ "cell_type": "markdown", "metadata": {}, "source": [ + "#### Download Data" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "--2024-09-05 07:01:47--  https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10q/uber_10q_march_2022.pdf\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.110.133, 185.199.109.133, 185.199.108.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.110.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 1260185 (1.2M) [application/octet-stream]\n", + "Saving to: ‘./uber_10q_march_2022.pdf’\n", + "\n", + "./uber_10q_march_20 100%[===================>]   1.20M  --.-KB/s    in 0.02s   \n", + "\n", + "2024-09-05 07:01:48 (77.6 MB/s) - ‘./uber_10q_march_2022.pdf’ saved [1260185/1260185]\n", + "\n" ] } ], "source": [ + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/10q/uber_10q_march_2022.pdf' -O './uber_10q_march_2022.pdf'" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "#### Setting API Keys" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# llama-parse is async-first; running its async code in a notebook requires nest_asyncio\n", + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()\n", + "\n", + "import os\n", + "\n", + "# API access to llama-cloud\n", + "os.environ[\"LLAMA_CLOUD_API_KEY\"] = \"llx-...\"\n", + "\n", + "# Using OpenAI API for embeddings/llms\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "#### Setting LLM and Embedding Model" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.core import VectorStoreIndex\n", + "from llama_index.core import Settings\n", + "\n", + "embed_model = OpenAIEmbedding(model=\"text-embedding-3-small\")\n", + "llm = OpenAI(model=\"gpt-3.5-turbo-0125\")\n", + "\n", + "Settings.llm = llm\n", + "Settings.embed_model = embed_model" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "#### LlamaParse PDF reader for PDF Parsing\n", + "\n", + "We compare two different retrieval/query-engine strategies:\n", + "\n", + "1. Using raw Markdown text as nodes for building the index and applying a simple query engine for generating results.\n", + "2. Using MarkdownElementNodeParser to parse the LlamaParse Markdown output and building a recursive retriever query engine over the resulting text and table nodes.\n", + "\n", + "A sketch of both constructions is shown in the next cell; the indexes themselves are built later in this notebook." ] },
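{ "cell_type": "markdown", "metadata": {}, "source": [ + "At a glance, the two constructions look like this. This is a preview sketch only: `raw_index` and `recursive_index` do not exist yet (they are created in the Build Index section below), so treat this cell as an outline rather than something to run here." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# Sketch of the two query engines compared in this notebook (illustrative;\n", + "# raw_index and recursive_index are built in the Build Index section below).\n", + "from llama_index.postprocessor.flag_embedding_reranker import (\n", + "    FlagEmbeddingReranker,\n", + ")\n", + "\n", + "reranker = FlagEmbeddingReranker(model=\"BAAI/bge-reranker-large\", top_n=5)\n", + "\n", + "# 1. Simple query engine over raw Markdown text nodes\n", + "raw_query_engine = raw_index.as_query_engine(similarity_top_k=15)\n", + "\n", + "# 2. Recursive retriever query engine over text + table nodes, with reranking\n", + "recursive_query_engine = recursive_index.as_query_engine(\n", + "    similarity_top_k=15, node_postprocessors=[reranker]\n", + ")" ] },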
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "Started parsing the file under job_id 0ef2f65b-9cab-4ca8-b221-d20f1f6d1336\n" ] } ], "source": [ + "# LlamaParse PDF reader for PDF Parsing\n", + "from llama_parse import LlamaParse\n", + "\n", + "documents = LlamaParse(result_type=\"markdown\").load_data(\n", + "    \"./uber_10q_march_2022.pdf\"\n", + ")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "# UNITED STATES SECURITIES AND EXCHANGE COMMISSION\n", + "\n", + "# Washington, D.C. 20549\n", + "\n", + "# FORM 10-Q\n", + "\n", + "(Mark One)\n", + "\n", + "☒ QUARTERLY REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934\n", + "\n", + "For the quarterly period ended March 31, 2022\n", + "\n", + "OR\n", + "\n", + "☐ TRANSITION REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934\n", + "\n", + "For the transition period from_____ to _____\n", + "\n", + "Commission File Number: 001-38902\n", + "\n", + "# UBER TECHNOLOGIES, INC.\n", + "\n", + "(Exact name of registrant as specified in its charter)\n", + "\n", + "Not Applicable\n", + "\n", + "(Former name, former address and former fiscal year, if changed since last report)\n", + "\n", + "|Delaware|45-2647441|\n", + "|---|---|\n", + "|(State or other jurisdiction of incorporation or organization)|(I.R.S. Employer Identification No.)|\n", + "|1515 3rd Street|San Francisco, California 94158|\n", + "|(Address of principal executive offices, including zip code)|(415) 612-8582|\n", + "|(Registrant’s telephone number, including area code)| |\n", + "\n", + "# Securities registered pursuant to Section 12(b) of the Act:\n", + "\n", + "|Title of each c...\n" ] } ], "source": [ + "print(documents[0].text[:1000] + \"...\")" ] },
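{ "cell_type": "markdown", "metadata": {}, "source": [ + "LlamaParse has kept the 10-Q's tables as Markdown pipe-tables, which is exactly what `MarkdownElementNodeParser` extracts in the next step. A rough, illustrative check of how much tabular content the parser will have to work with:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# Rough count of pipe-table rows in the parsed Markdown (illustrative only)\n", + "table_rows = [\n", + "    line\n", + "    for doc in documents\n", + "    for line in doc.text.splitlines()\n", + "    if line.lstrip().startswith(\"|\")\n", + "]\n", + "print(f\"{len(table_rows)} table rows across {len(documents)} document(s)\")" ] },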
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ + "3it [00:00, 41803.69it/s]\n", + "1it [00:00, 22310.13it/s]\n", + "... (remaining per-element tqdm progress lines trimmed) ...\n", + "0it [00:00, 
?it/s]\n", + "0it [00:00, ?it/s]\n", + "0it [00:00, ?it/s]\n" + ] + } + ], + "source": [ + "from llama_index.core.node_parser import MarkdownElementNodeParser\n", + "\n", + "node_parser = MarkdownElementNodeParser(\n", + " llm=OpenAI(model=\"gpt-3.5-turbo-0125\"), num_workers=8\n", + ")\n", + "\n", + "nodes = node_parser.get_nodes_from_documents(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "text_nodes, index_nodes = node_parser.get_nodes_and_objects(nodes)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "TextNode(id_='c6ffea61-1221-40e3-b0e0-5b24cfbd02d5', embedding=None, metadata={}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={: RelatedNodeInfo(node_id='33b7b29c-8eba-458b-a25f-bb8f88951e92', node_type=, metadata={}, hash='3d4ec5b02a042598b0ea47cdac56453869c17b531a10f60343e9598e05a9390e'), : RelatedNodeInfo(node_id='de618b65-c78a-4390-8536-4e9e295c0e49', node_type=, metadata={'col_schema': 'Column: Delaware\\nType: string\\nSummary: State or other jurisdiction of incorporation or organization\\n\\nColumn: 45-2647441\\nType: string\\nSummary: I.R.S. Employer Identification No.'}, hash='c008153189b8dd031a3e5e694239a50ebd21f42602676f072d9746241fcef858')}, text='UNITED STATES SECURITIES AND EXCHANGE COMMISSION\\n\\n Washington, D.C. 20549\\n\\n FORM 10-Q\\n\\n(Mark One)\\n\\n☒ QUARTERLY REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934\\n\\nFor the quarterly period ended March 31, 2022\\n\\nOR\\n\\n☐ TRANSITION REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934\\n\\nFor the transition period from_____ to _____\\n\\nCommission File Number: 001-38902\\n\\n UBER TECHNOLOGIES, INC.\\n\\n(Exact name of registrant as specified in its charter)\\n\\nNot Applicable\\n\\n(Former name, former address and former fiscal year, if changed since last report)', mimetype='text/plain', start_char_idx=1, end_char_idx=595, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text_nodes[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "IndexNode(id_='de618b65-c78a-4390-8536-4e9e295c0e49', embedding=None, metadata={'col_schema': 'Column: Delaware\\nType: string\\nSummary: State or other jurisdiction of incorporation or organization\\n\\nColumn: 45-2647441\\nType: string\\nSummary: I.R.S. Employer Identification No.'}, excluded_embed_metadata_keys=['col_schema'], excluded_llm_metadata_keys=[], relationships={: RelatedNodeInfo(node_id='33b7b29c-8eba-458b-a25f-bb8f88951e92', node_type=, metadata={}, hash='3d4ec5b02a042598b0ea47cdac56453869c17b531a10f60343e9598e05a9390e'), : RelatedNodeInfo(node_id='c6ffea61-1221-40e3-b0e0-5b24cfbd02d5', node_type=, metadata={}, hash='0cafbb2bbffe3085738e748c9ed19c5b88f6b300d876820fc3caa7afa8f0627f'), : RelatedNodeInfo(node_id='c57f8dab-7b69-4850-8885-6a9cf0f531f9', node_type=, metadata={'table_df': \"{'Delaware': {0: '(State or other jurisdiction of incorporation or organization)', 1: '1515 3rd Street', 2: '(Address of principal executive offices, including zip code)', 3: '(Registrant’s telephone number, including area code)'}, '45-2647441': {0: '(I.R.S. 
Employer Identification No.)', 1: 'San Francisco, California 94158', 2: '(415) 612-8582', 3: ' '}}\", 'table_summary': \"Table providing information about a company's incorporation details, address of principal executive offices, and contact information.,\\nwith the following columns:\\n- Delaware: State or other jurisdiction of incorporation or organization\\n- 45-2647441: I.R.S. Employer Identification No.\\n\"}, hash='fadc844962620525c1d3c8d7ff1693a090642818928f9ce7600117258a39aa04')}, text=\"Table providing information about a company's incorporation details, address of principal executive offices, and contact information.,\\nwith the following columns:\\n- Delaware: State or other jurisdiction of incorporation or organization\\n- 45-2647441: I.R.S. Employer Identification No.\\n\", mimetype='text/plain', start_char_idx=601, end_char_idx=919, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n', index_id='c57f8dab-7b69-4850-8885-6a9cf0f531f9', obj=TextNode(id_='c57f8dab-7b69-4850-8885-6a9cf0f531f9', embedding=None, metadata={'table_df': \"{'Delaware': {0: '(State or other jurisdiction of incorporation or organization)', 1: '1515 3rd Street', 2: '(Address of principal executive offices, including zip code)', 3: '(Registrant’s telephone number, including area code)'}, '45-2647441': {0: '(I.R.S. Employer Identification No.)', 1: 'San Francisco, California 94158', 2: '(415) 612-8582', 3: ' '}}\", 'table_summary': \"Table providing information about a company's incorporation details, address of principal executive offices, and contact information.,\\nwith the following columns:\\n- Delaware: State or other jurisdiction of incorporation or organization\\n- 45-2647441: I.R.S. Employer Identification No.\\n\"}, excluded_embed_metadata_keys=['table_df', 'table_summary'], excluded_llm_metadata_keys=['table_df', 'table_summary'], relationships={: RelatedNodeInfo(node_id='33b7b29c-8eba-458b-a25f-bb8f88951e92', node_type=, metadata={}, hash='3d4ec5b02a042598b0ea47cdac56453869c17b531a10f60343e9598e05a9390e'), : RelatedNodeInfo(node_id='de618b65-c78a-4390-8536-4e9e295c0e49', node_type=, metadata={'col_schema': 'Column: Delaware\\nType: string\\nSummary: State or other jurisdiction of incorporation or organization\\n\\nColumn: 45-2647441\\nType: string\\nSummary: I.R.S. Employer Identification No.'}, hash='c008153189b8dd031a3e5e694239a50ebd21f42602676f072d9746241fcef858'), : RelatedNodeInfo(node_id='c0fa90a9-fe14-46fa-8434-cd70e134d40e', node_type=, metadata={}, hash='cc6b1c09572e3a0bd06a93ccea32a5562b36a393a36ede3852f4a5fc946c51fd')}, text=\"Table providing information about a company's incorporation details, address of principal executive offices, and contact information.,\\nwith the following columns:\\n- Delaware: State or other jurisdiction of incorporation or organization\\n- 45-2647441: I.R.S. Employer Identification No.\\n\\n|Delaware|45-2647441|\\n|---|---|\\n|(State or other jurisdiction of incorporation or organization)|(I.R.S. 
Employer Identification No.)|\\n|1515 3rd Street|San Francisco, California 94158|\\n|(Address of principal executive offices, including zip code)|(415) 612-8582|\\n|(Registrant’s telephone number, including area code)| |\\n\", mimetype='text/plain', start_char_idx=601, end_char_idx=919, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "index_nodes[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Build Index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "recursive_index = VectorStoreIndex(nodes=text_nodes + index_nodes)\n", + "raw_index = VectorStoreIndex.from_documents(documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Create Query Engines" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "d082846d028644899427f729f182ec05", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "tokenizer_config.json: 0%| | 0.00/443 [00:00\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/customization/llms/SimpleIndexDemo-ChatGPT.ipynb b/docs/docs/examples/customization/llms/SimpleIndexDemo-ChatGPT.ipynb index f8746b1b4924c..0264ed417a4ed 100644 --- a/docs/docs/examples/customization/llms/SimpleIndexDemo-ChatGPT.ipynb +++ b/docs/docs/examples/customization/llms/SimpleIndexDemo-ChatGPT.ipynb @@ -5,7 +5,7 @@ "id": "7ae43f8b", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_camel.ipynb b/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_camel.ipynb index e643d37c36ecc..5fd3303e56758 100644 --- a/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_camel.ipynb +++ b/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_camel.ipynb @@ -5,7 +5,7 @@ "id": "5f753ded", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_stablelm.ipynb b/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_stablelm.ipynb index a9bc05e564c76..df234e5d336e8 100644 --- a/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_stablelm.ipynb +++ b/docs/docs/examples/customization/llms/SimpleIndexDemo-Huggingface_stablelm.ipynb @@ -5,7 +5,7 @@ "id": "6b39b02d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/customization/prompts/chat_prompts.ipynb b/docs/docs/examples/customization/prompts/chat_prompts.ipynb index 6a62f431ed4d0..1c356a792ada6 100644 --- a/docs/docs/examples/customization/prompts/chat_prompts.ipynb +++ b/docs/docs/examples/customization/prompts/chat_prompts.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/customization/prompts/completion_prompts.ipynb b/docs/docs/examples/customization/prompts/completion_prompts.ipynb index 43137468571be..824d342a384d2 100644 --- a/docs/docs/examples/customization/prompts/completion_prompts.ipynb +++ b/docs/docs/examples/customization/prompts/completion_prompts.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git 
a/docs/docs/examples/customization/streaming/SimpleIndexDemo-streaming.ipynb b/docs/docs/examples/customization/streaming/SimpleIndexDemo-streaming.ipynb index 7fa593ad2cc74..86515efff87c1 100644 --- a/docs/docs/examples/customization/streaming/SimpleIndexDemo-streaming.ipynb +++ b/docs/docs/examples/customization/streaming/SimpleIndexDemo-streaming.ipynb @@ -5,7 +5,7 @@ "id": "87854970", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/DatabaseReaderDemo.ipynb b/docs/docs/examples/data_connectors/DatabaseReaderDemo.ipynb index 063621a07352a..a275cd40577d9 100644 --- a/docs/docs/examples/data_connectors/DatabaseReaderDemo.ipynb +++ b/docs/docs/examples/data_connectors/DatabaseReaderDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/MakeDemo.ipynb b/docs/docs/examples/data_connectors/MakeDemo.ipynb index 72b7b282a3fc6..fbbc0b5176b30 100644 --- a/docs/docs/examples/data_connectors/MakeDemo.ipynb +++ b/docs/docs/examples/data_connectors/MakeDemo.ipynb @@ -5,7 +5,7 @@ "id": "b890c854", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/MboxReaderDemo.ipynb b/docs/docs/examples/data_connectors/MboxReaderDemo.ipynb index f6ebf3d9668b1..d4b45a9c6fb35 100644 --- a/docs/docs/examples/data_connectors/MboxReaderDemo.ipynb +++ b/docs/docs/examples/data_connectors/MboxReaderDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/MilvusReaderDemo.ipynb b/docs/docs/examples/data_connectors/MilvusReaderDemo.ipynb index d0ad942161f0f..4310fad9db9b7 100644 --- a/docs/docs/examples/data_connectors/MilvusReaderDemo.ipynb +++ b/docs/docs/examples/data_connectors/MilvusReaderDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/MyScaleReaderDemo.ipynb b/docs/docs/examples/data_connectors/MyScaleReaderDemo.ipynb index 96c3172f90f93..583e1276552a4 100644 --- a/docs/docs/examples/data_connectors/MyScaleReaderDemo.ipynb +++ b/docs/docs/examples/data_connectors/MyScaleReaderDemo.ipynb @@ -5,7 +5,7 @@ "id": "105c464c", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/ObsidianReaderDemo.ipynb b/docs/docs/examples/data_connectors/ObsidianReaderDemo.ipynb index 2894f307ca204..33d9a020d45ae 100644 --- a/docs/docs/examples/data_connectors/ObsidianReaderDemo.ipynb +++ b/docs/docs/examples/data_connectors/ObsidianReaderDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/PsychicDemo.ipynb b/docs/docs/examples/data_connectors/PsychicDemo.ipynb index d58c75c3db46b..1ff4ea95938a8 100644 --- a/docs/docs/examples/data_connectors/PsychicDemo.ipynb +++ b/docs/docs/examples/data_connectors/PsychicDemo.ipynb @@ -5,7 +5,7 @@ "id": "8c35f3c4", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/data_connectors/html_tag_reader.ipynb b/docs/docs/examples/data_connectors/html_tag_reader.ipynb index 6d366b0adf7b6..0c3074c01a9a7 100644 --- a/docs/docs/examples/data_connectors/html_tag_reader.ipynb +++ b/docs/docs/examples/data_connectors/html_tag_reader.ipynb @@ -5,7 +5,7 @@ "id": "d70938ec", "metadata": {}, "source": [ - "\"Open" + 
"\"Open" ] }, { diff --git a/docs/docs/examples/docstore/DocstoreDemo.ipynb b/docs/docs/examples/docstore/DocstoreDemo.ipynb index df3c9e5162d01..fe3d5b5aee1d4 100644 --- a/docs/docs/examples/docstore/DocstoreDemo.ipynb +++ b/docs/docs/examples/docstore/DocstoreDemo.ipynb @@ -9,7 +9,7 @@ "\n", "This guide shows you how to directly use our `DocumentStore` abstraction. By putting nodes in the docstore, this allows you to define multiple indices over the same underlying docstore, instead of duplicating data across indices.\n", "\n", - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb b/docs/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb index 5b2d147a46617..2dc5f5b973d89 100644 --- a/docs/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb +++ b/docs/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb @@ -9,7 +9,7 @@ "\n", "This guide shows you how to directly use our `DocumentStore` abstraction backed by DynamoDB. By putting nodes in the docstore, this allows you to define multiple indices over the same underlying docstore, instead of duplicating data across indices.\n", "\n", - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/docstore/FirestoreDemo.ipynb b/docs/docs/examples/docstore/FirestoreDemo.ipynb index d9c5ff10b2b64..9f4296c8e7716 100644 --- a/docs/docs/examples/docstore/FirestoreDemo.ipynb +++ b/docs/docs/examples/docstore/FirestoreDemo.ipynb @@ -8,7 +8,7 @@ "\n", "This guide shows you how to directly use our `DocumentStore` abstraction backed by Google Firestore. By putting nodes in the docstore, this allows you to define multiple indices over the same underlying docstore, instead of duplicating data across indices.\n", "\n", - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/docstore/MongoDocstoreDemo.ipynb b/docs/docs/examples/docstore/MongoDocstoreDemo.ipynb index 8af5f771c890a..88cb20fca1daa 100644 --- a/docs/docs/examples/docstore/MongoDocstoreDemo.ipynb +++ b/docs/docs/examples/docstore/MongoDocstoreDemo.ipynb @@ -9,7 +9,7 @@ "\n", "This guide shows you how to directly use our `DocumentStore` abstraction backed by MongoDB. 
By putting nodes in the docstore, this allows you to define multiple indices over the same underlying docstore, instead of duplicating data across indices.\n", "\n", - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb b/docs/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb index 23b7a86b7b544..a163131777b1a 100644 --- a/docs/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb +++ b/docs/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb @@ -5,7 +5,7 @@ "id": "c59e5e4e", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/Anyscale.ipynb b/docs/docs/examples/embeddings/Anyscale.ipynb index 7193b1f232e3b..f82f69c8ae1cd 100644 --- a/docs/docs/examples/embeddings/Anyscale.ipynb +++ b/docs/docs/examples/embeddings/Anyscale.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/Langchain.ipynb b/docs/docs/examples/embeddings/Langchain.ipynb index 97b3437ce4341..b20ea3a692b2d 100644 --- a/docs/docs/examples/embeddings/Langchain.ipynb +++ b/docs/docs/examples/embeddings/Langchain.ipynb @@ -9,7 +9,7 @@ "\n", "This guide shows you how to use embedding models from [LangChain](https://python.langchain.com/docs/integrations/text_embedding/).\n", "\n", - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/OpenAI.ipynb b/docs/docs/examples/embeddings/OpenAI.ipynb index 9697d2d7270bc..b5474da2893ed 100644 --- a/docs/docs/examples/embeddings/OpenAI.ipynb +++ b/docs/docs/examples/embeddings/OpenAI.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/alephalpha.ipynb b/docs/docs/examples/embeddings/alephalpha.ipynb index 82bceabb46129..a2c42b3bdd54a 100644 --- a/docs/docs/examples/embeddings/alephalpha.ipynb +++ b/docs/docs/examples/embeddings/alephalpha.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/bedrock.ipynb b/docs/docs/examples/embeddings/bedrock.ipynb index c7fe8fe931b72..0f91ce15701a5 100644 --- a/docs/docs/examples/embeddings/bedrock.ipynb +++ b/docs/docs/examples/embeddings/bedrock.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/custom_embeddings.ipynb b/docs/docs/examples/embeddings/custom_embeddings.ipynb index 647a761932321..b6b1956ce6b62 100644 --- a/docs/docs/examples/embeddings/custom_embeddings.ipynb +++ b/docs/docs/examples/embeddings/custom_embeddings.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -88,9 +88,9 @@ " instruction: str = \"Represent a document for semantic search:\",\n", " **kwargs: Any,\n", " ) -> None:\n", + " super().__init__(**kwargs)\n", " self._model = INSTRUCTOR(instructor_model_name)\n", " self._instruction = instruction\n", - " super().__init__(**kwargs)\n", "\n", " @classmethod\n", " def class_name(cls) -> str:\n", diff --git a/docs/docs/examples/embeddings/deepinfra.ipynb b/docs/docs/examples/embeddings/deepinfra.ipynb index 591f1de537168..3f83759d7da74 100644 --- a/docs/docs/examples/embeddings/deepinfra.ipynb +++ b/docs/docs/examples/embeddings/deepinfra.ipynb @@ -6,7 +6,7 @@ "metadata": {}, "outputs": [], "source": [ - "\"Open\n" + "\"Open\n" ] }, { diff --git 
a/docs/docs/examples/embeddings/fireworks.ipynb b/docs/docs/examples/embeddings/fireworks.ipynb index 7ebeeb8f19844..d3b4daec65ef0 100644 --- a/docs/docs/examples/embeddings/fireworks.ipynb +++ b/docs/docs/examples/embeddings/fireworks.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/nvidia.ipynb b/docs/docs/examples/embeddings/nvidia.ipynb index db7beaf3636d9..91cb1b732649f 100644 --- a/docs/docs/examples/embeddings/nvidia.ipynb +++ b/docs/docs/examples/embeddings/nvidia.ipynb @@ -128,12 +128,33 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/raspawar/Desktop/llama_index/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/llama_index/embeddings/nvidia/base.py:161: UserWarning: Default model is set as: NV-Embed-QA. \n", + "Set model using model parameter. \n", + "To get available models use available_models property.\n", + " warnings.warn(\n" + ] + }, + { + "data": { + "text/plain": [ + "[Model(id='NV-Embed-QA', base_model=None)]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "from llama_index.embeddings.nvidia import NVIDIAEmbedding\n", "\n", "# connect to an embedding NIM running at localhost:8080\n", - "embedder = NVIDIAEmbeddings(base_url=\"http://localhost:8080/v1\")\n", + "embedder = NVIDIAEmbedding(base_url=\"http://localhost:8080/v1\")\n", "embedder.available_models" ] }, diff --git a/docs/docs/examples/embeddings/oci_genai.ipynb b/docs/docs/examples/embeddings/oci_genai.ipynb index f3e6e6476a33d..f59422484fd3b 100644 --- a/docs/docs/examples/embeddings/oci_genai.ipynb +++ b/docs/docs/examples/embeddings/oci_genai.ipynb @@ -6,7 +6,7 @@ "id": "6d1ca9ac", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/ollama_embedding.ipynb b/docs/docs/examples/embeddings/ollama_embedding.ipynb index 0fc06d648449b..536d8164c4168 100644 --- a/docs/docs/examples/embeddings/ollama_embedding.ipynb +++ b/docs/docs/examples/embeddings/ollama_embedding.ipynb @@ -5,7 +5,7 @@ "id": "efe8f603c3a1ea67", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/text_embedding_inference.ipynb b/docs/docs/examples/embeddings/text_embedding_inference.ipynb index d7b8d5c68b5d2..2a08337c09c6b 100644 --- a/docs/docs/examples/embeddings/text_embedding_inference.ipynb +++ b/docs/docs/examples/embeddings/text_embedding_inference.ipynb @@ -16,7 +16,7 @@ "\n", "This notebook demonstrates how to configure `TextEmbeddingInference` embeddings.\n", "\n", - "The first step is to deploy the embeddings server. For detailed instructions, see the [official repository for Text Embeddings Inference](https://github.com/huggingface/text-embeddings-inference).\n", + "The first step is to deploy the embeddings server. For detailed instructions, see the [official repository for Text Embeddings Inference](https://github.com/huggingface/text-embeddings-inference). Or [tei-gaudi repository](https://github.com/huggingface/tei-gaudi) if you are deploying on Habana Gaudi/Gaudi 2. \n", "\n", "Once deployed, the code below will connect to and submit embeddings for inference." 
] diff --git a/docs/docs/examples/embeddings/together.ipynb b/docs/docs/examples/embeddings/together.ipynb index b132125c4e27c..e81c06dbbb06d 100644 --- a/docs/docs/examples/embeddings/together.ipynb +++ b/docs/docs/examples/embeddings/together.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/embeddings/upstage.ipynb b/docs/docs/examples/embeddings/upstage.ipynb index e1252bee5acc2..0c00df9f5f69e 100644 --- a/docs/docs/examples/embeddings/upstage.ipynb +++ b/docs/docs/examples/embeddings/upstage.ipynb @@ -28,7 +28,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install llama-index-embeddings-upstage==0.1.0" + "%pip install llama-index-embeddings-upstage==0.2.1" ] }, { diff --git a/docs/docs/examples/evaluation/BeirEvaluation.ipynb b/docs/docs/examples/evaluation/BeirEvaluation.ipynb index fc5a973f2774f..9c3f4b04a4458 100644 --- a/docs/docs/examples/evaluation/BeirEvaluation.ipynb +++ b/docs/docs/examples/evaluation/BeirEvaluation.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/Deepeval.ipynb b/docs/docs/examples/evaluation/Deepeval.ipynb index c91b3fdcb63b9..5fadbd3e7da29 100644 --- a/docs/docs/examples/evaluation/Deepeval.ipynb +++ b/docs/docs/examples/evaluation/Deepeval.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/HotpotQADistractor.ipynb b/docs/docs/examples/evaluation/HotpotQADistractor.ipynb index 667356c07b355..94c28551f3ec2 100644 --- a/docs/docs/examples/evaluation/HotpotQADistractor.ipynb +++ b/docs/docs/examples/evaluation/HotpotQADistractor.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/QuestionGeneration.ipynb b/docs/docs/examples/evaluation/QuestionGeneration.ipynb index b327f7d0ef338..fbf51e902b839 100644 --- a/docs/docs/examples/evaluation/QuestionGeneration.ipynb +++ b/docs/docs/examples/evaluation/QuestionGeneration.ipynb @@ -5,7 +5,7 @@ "id": "af775680", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/RAGChecker.ipynb b/docs/docs/examples/evaluation/RAGChecker.ipynb index 558a7c4125792..0bc7d26a10d42 100644 --- a/docs/docs/examples/evaluation/RAGChecker.ipynb +++ b/docs/docs/examples/evaluation/RAGChecker.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/RetryQuery.ipynb b/docs/docs/examples/evaluation/RetryQuery.ipynb index e399db922a350..ed056e79ef46c 100644 --- a/docs/docs/examples/evaluation/RetryQuery.ipynb +++ b/docs/docs/examples/evaluation/RetryQuery.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/TonicValidateEvaluators.ipynb b/docs/docs/examples/evaluation/TonicValidateEvaluators.ipynb index f13eff89b4a38..9c6946c51204b 100644 --- a/docs/docs/examples/evaluation/TonicValidateEvaluators.ipynb +++ b/docs/docs/examples/evaluation/TonicValidateEvaluators.ipynb @@ -5,7 +5,7 @@ "id": "55c4401c-71f2-4cf2-a7c3-5cd490302e1b", "metadata": {}, "source": [ - "\n", + "\n", " \"Open\n", "" ] diff --git a/docs/docs/examples/evaluation/answer_and_context_relevancy.ipynb b/docs/docs/examples/evaluation/answer_and_context_relevancy.ipynb index 
a61c0be134cb1..8de4c40653909 100644 --- a/docs/docs/examples/evaluation/answer_and_context_relevancy.ipynb +++ b/docs/docs/examples/evaluation/answer_and_context_relevancy.ipynb @@ -5,7 +5,7 @@ "id": "9ad59b9d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/correctness_eval.ipynb b/docs/docs/examples/evaluation/correctness_eval.ipynb index 8e8cb14ab1249..d816f1066f1f7 100644 --- a/docs/docs/examples/evaluation/correctness_eval.ipynb +++ b/docs/docs/examples/evaluation/correctness_eval.ipynb @@ -5,7 +5,7 @@ "id": "79c6ea87", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/guideline_eval.ipynb b/docs/docs/examples/evaluation/guideline_eval.ipynb index aab198b3e0e0e..3d8739e5f91cb 100644 --- a/docs/docs/examples/evaluation/guideline_eval.ipynb +++ b/docs/docs/examples/evaluation/guideline_eval.ipynb @@ -5,7 +5,7 @@ "id": "9e22fa55", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/mt_bench_human_judgement.ipynb b/docs/docs/examples/evaluation/mt_bench_human_judgement.ipynb index 077b73be239ce..b2ccb1bef4d8a 100644 --- a/docs/docs/examples/evaluation/mt_bench_human_judgement.ipynb +++ b/docs/docs/examples/evaluation/mt_bench_human_judgement.ipynb @@ -5,7 +5,7 @@ "id": "4bc26779", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/mt_bench_single_grading.ipynb b/docs/docs/examples/evaluation/mt_bench_single_grading.ipynb index 9d5a9ccc59012..132bb4d58da88 100644 --- a/docs/docs/examples/evaluation/mt_bench_single_grading.ipynb +++ b/docs/docs/examples/evaluation/mt_bench_single_grading.ipynb @@ -5,7 +5,7 @@ "id": "39bbe88b", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/evaluation/multi_modal/multi_modal_rag_evaluation.ipynb b/docs/docs/examples/evaluation/multi_modal/multi_modal_rag_evaluation.ipynb index 1b387513472f1..2a42b875b6205 100644 --- a/docs/docs/examples/evaluation/multi_modal/multi_modal_rag_evaluation.ipynb +++ b/docs/docs/examples/evaluation/multi_modal/multi_modal_rag_evaluation.ipynb @@ -5,7 +5,7 @@ "id": "f064137a-a77f-4745-bae5-6b7eb3cbbf3f", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/finetuning/embeddings/finetune_corpus_embedding.ipynb b/docs/docs/examples/finetuning/embeddings/finetune_corpus_embedding.ipynb new file mode 100644 index 0000000000000..277d5bc9a8aa1 --- /dev/null +++ b/docs/docs/examples/finetuning/embeddings/finetune_corpus_embedding.ipynb @@ -0,0 +1,485 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Finetuning corpus embeddings using NUDGE\n", + "[NUDGE](https://www.arxiv.org/abs/2409.02343) is a novel, simple, and lightweight fine-tuning method that boosts accuracy when retrieving text using semantic similarity with pre-trained embedding models. NUDGE directly modifies the embeddings of data records to maximize the similarity between training queries and their ground-truth answers. NUDGE does so non-parametrically: it does not modify model parameters to generate better embeddings, as fine-tuning the embedding model or training adapters would. Instead, NUDGE directly changes the embeddings themselves. Compared with fine-tuning the pre-trained model and training adapters, NUDGE provides 3.3x and 4.3x higher increases in accuracy and runs 200x and 3x faster, respectively.
[Here](https://data-people-group.github.io/blogs/2024/09/05/nudge/) is a blog post on NUDGE, and [here](https://www.arxiv.org/abs/2409.02343) is the paper with more details.\n", + "\n", + "We demonstrate NUDGE's effectiveness on a commonly used Information Retrieval benchmark called Scifact." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install llama-index-experimental llama-index-embeddings-huggingface torch datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load the scifact benchmark" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.finetuning import EmbeddingQAFinetuneDataset\n", + "from datasets import load_dataset\n", + "\n", + "\n", + "def load_hf_dataset(dataset_name):\n", + " hf_dataset_name = f\"sepz/{dataset_name}_ft\"\n", + " corpus = load_dataset(hf_dataset_name, \"data_records\", split=\"train\")\n", + "\n", + " queries_train = load_dataset(hf_dataset_name, \"qs\", split=\"train\")\n", + " queries_validation = load_dataset(hf_dataset_name, \"qs\", split=\"dev\")\n", + " queries_test = load_dataset(hf_dataset_name, \"qs\", split=\"test\")\n", + "\n", + " qrels_train = load_dataset(hf_dataset_name, \"qs_rel\", split=\"train\")\n", + " qrels_validation = load_dataset(hf_dataset_name, \"qs_rel\", split=\"dev\")\n", + " qrels_test = load_dataset(hf_dataset_name, \"qs_rel\", split=\"test\")\n", + "\n", + " corpus = {\n", + " str(corpus[i][\"record_id\"]): corpus[i][\"text\"]\n", + " for i in range(len(corpus))\n", + " }\n", + "\n", + " queries_train = {\n", + " str(queries_train[i][\"q_id\"]): queries_train[i][\"input\"]\n", + " for i in range(len(queries_train))\n", + " }\n", + " queries_validation = {\n", + " str(r[\"q_id\"]): r[\"input\"] for r in queries_validation\n", + " }\n", + " queries_test = {str(r[\"q_id\"]): r[\"input\"] for r in queries_test}\n", + "\n", + " qrels_train = (\n", + " qrels_train.to_pandas()\n", + " .groupby(\"q_id\")[\"record_id\"]\n", + " .apply(list)\n", + " .to_dict()\n", + " )\n", + " qrels_validation = (\n", + " qrels_validation.to_pandas()\n", + " .groupby(\"q_id\")[\"record_id\"]\n", + " .apply(list)\n", + " .to_dict()\n", + " )\n", + " qrels_test = (\n", + " qrels_test.to_pandas()\n", + " .groupby(\"q_id\")[\"record_id\"]\n", + " .apply(list)\n", + " .to_dict()\n", + " )\n", + " # convert to strings\n", + " qrels_train = {str(k): [str(i) for i in v] for k, v in qrels_train.items()}\n", + " qrels_validation = {\n", + " str(k): [str(i) for i in v] for k, v in qrels_validation.items()\n", + " }\n", + " qrels_test = {str(k): [str(i) for i in v] for k, v in qrels_test.items()}\n", + "\n", + " # Load the dataset\n", + " train_dataset = EmbeddingQAFinetuneDataset(\n", + " corpus=corpus, queries=queries_train, relevant_docs=qrels_train\n", + " )\n", + " validation_dataset = EmbeddingQAFinetuneDataset(\n", + " corpus=corpus,\n", + " queries=queries_validation,\n", + " relevant_docs=qrels_validation,\n", + " )\n", + " test_dataset = EmbeddingQAFinetuneDataset(\n", + " corpus=corpus, queries=queries_test, relevant_docs=qrels_test\n", + " )\n", + "\n", + " return train_dataset, validation_dataset, test_dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load the dataset and base embedding model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from 
llama_index.core.embeddings import resolve_embed_model\n", + "\n", + "train_dataset, val_dataset, test_dataset = load_hf_dataset(\"scifact\")\n", + "base_embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we take a peek at the dataset, we can see that it's structured as:\n", + "- corpus: a mapping of document ID to text\n", + "- queries: a mapping of query ID to query text\n", + "- relevant_docs: a mapping of query ID to a list of document IDs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Depletion of nitric oxide is responsible for vasospasm.\n" + ] + } + ], + "source": [ + "print(val_dataset.queries[\"2\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['552']\n" + ] + } + ], + "source": [ + "print(val_dataset.relevant_docs[\"2\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CONTEXT Delayed cerebral vasospasm causes permanent neurological deficits or death in at least 15% of patients following otherwise successful treatment for ruptured intracranial aneurysm. Decreased bioavailability of nitric oxide has been associated with the development of cerebral vasospasm. OBJECTIVE To determine whether infusions of nitrite will prevent delayed cerebral vasospasm. DESIGN, SETTING, AND SUBJECTS A total of 14 anesthetized cynomolgus monkeys had an autologous blood clot placed around the right middle cerebral artery. Cerebral arteriography was performed before clot placement and on days 7 and 14 to assess vasospasm. The study was conducted from August 2003 to February 2004. INTERVENTIONS A 90-mg sodium nitrite intravenous solution infused over 24 hours plus a 45-mg sodium nitrite bolus daily (n = 3); a 180-mg sodium nitrite intravenous solution infused over 24 hours (n = 3); or a control saline solution infusion (n = 8). Each was infused continuously for 14 days. MAIN OUTCOME MEASURES Nitrite, S-nitrosothiol, and methemoglobin levels in blood and cerebrospinal fluid and degree of arteriographic vasospasm. RESULTS In control monkeys, mean (SD) cerebrospinal fluid nitrite levels decreased from 3.1 (1.5) micromol/L to 0.4 (0.1) micromol/L at day 7 and to 0.4 (0.4) micromol/L at day 14 (P = .03). All 8 control monkeys developed significant vasospasm of the right middle cerebral artery, which was complicated by stroke and death in 1 animal. Sodium nitrite infusions increased the nitrite and methemoglobin levels (<2.1% of total hemoglobin) in the blood and cerebrospinal fluid without evoking systemic hypotension. Nitrite infusion prevented development of vasospasm (no animals developed significant vasospasm; mean [SD] reduction in right middle cerebral artery area on day 7 after subarachnoid hemorrhage of 8% [9%] in nitrite-treated monkeys vs 47% [5%] in saline-treated controls; P<.001). There was a negative correlation between the concentration of nitrite in cerebrospinal fluid and the degree of cerebral vasospasm (P<.001). Pharmacological effects of nitrite infusion were also associated with the formation of S-nitrosothiol in cerebrospinal fluid. There was no clinical or pathological evidence of nitrite toxicity.
CONCLUSION Subacute sodium nitrite infusions prevented delayed cerebral vasospasm in a primate model of subarachnoid hemorrhage.\n" + ] + } + ], + "source": [ + "print(val_dataset.corpus[\"552\"])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Using your own Datasets\n", + "\n", + "As you can see, you can run this notebook on any dataset, as long as you have queries and a mapping to relevant documents! \n", + "\n", + "If you wanted, you could also write your own dataset, or even use llama-index to create your own.\n", + "\n", + "Uncomment the code below and add your own files if you want to try it out." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# This code would generate your own dataset against your own custom data\n", + "from llama_index.finetuning import generate_qa_embedding_pairs\n", + "from llama_index.core import SimpleDirectoryReader\n", + "from llama_index.core.node_parser import SentenceSplitter\n", + "from llama_index.core.evaluation import EmbeddingQAFinetuneDataset\n", + "\n", + "\n", + "def load_corpus(files, verbose=False):\n", + " if verbose:\n", + " print(f\"Loading files {files}\")\n", + "\n", + " reader = SimpleDirectoryReader(input_files=files)\n", + " docs = reader.load_data()\n", + " if verbose:\n", + " print(f\"Loaded {len(docs)} docs\")\n", + "\n", + " parser = SentenceSplitter()\n", + " nodes = parser.get_nodes_from_documents(docs, show_progress=verbose)\n", + "\n", + " if verbose:\n", + " print(f\"Parsed {len(nodes)} nodes\")\n", + "\n", + " return nodes\n", + "\n", + "\n", + "# Load data\n", + "# train_nodes = load_corpus([\"file1.pdf\", ...], verbose=True)\n", + "# val_nodes = load_corpus([\"file2.pdf\", ...], verbose=True)\n", + "\n", + "# Generate pairs\n", + "# train_dataset = generate_qa_embedding_pairs(train_nodes)\n", + "# val_dataset = generate_qa_embedding_pairs(val_nodes)\n", + "\n", + "# [Optional] Save to disk\n", + "# train_dataset.save_json(\"train_dataset.json\")\n", + "# val_dataset.save_json(\"val_dataset.json\")\n", + "\n", + "# [Optional] Load\n", + "# train_dataset = EmbeddingQAFinetuneDataset.from_json(\"train_dataset.json\")\n", + "# val_dataset = EmbeddingQAFinetuneDataset.from_json(\"val_dataset.json\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Evaluation\n", + "A common Information Retrieval metric to report during evaluation is NDCG@k." 
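For reference, the `ndcg_at_k` helper defined in the next cell implements the binary-relevance form of this metric. Writing $R$ for the set of relevant document IDs for a query and $\mathrm{rel}_i \in \{0,1\}$ for whether the $i$-th retrieved document is in $R$:

$$
\mathrm{DCG@}k = \sum_{i=1}^{k} \frac{\mathrm{rel}_i}{\log_2(i+1)}, \qquad
\mathrm{IDCG@}k = \sum_{i=1}^{\min(k,\,|R|)} \frac{1}{\log_2(i+1)}, \qquad
\mathrm{NDCG@}k = \frac{\mathrm{DCG@}k}{\mathrm{IDCG@}k}
$$

The reported score is the mean of $\mathrm{NDCG@}k$ over all queries; the code's $\log_2(i+2)$ with 0-based $i$ is the same discount written with 1-based rank.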
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Optional\n", + "\n", + "import torch\n", + "import numpy as np\n", + "from tqdm import tqdm\n", + "from llama_index.core.schema import TextNode\n", + "from llama_index.core.base.embeddings.base import BaseEmbedding\n", + "from llama_index.core.base.base_retriever import BaseRetriever\n", + "from llama_index.core import VectorStoreIndex\n", + "\n", + "\n", + "def build_retriever(\n", + "    dataset: EmbeddingQAFinetuneDataset,\n", + "    embed_model: BaseEmbedding | str,\n", + "    corpus_embeddings: Optional[torch.Tensor] = None,\n", + "    k: int = 10,\n", + ") -> BaseRetriever:\n", + "    corpus = dataset.corpus\n", + "\n", + "    nodes = []\n", + "    for i, (id_, text) in enumerate(corpus.items()):\n", + "        if corpus_embeddings is not None:\n", + "            nodes.append(\n", + "                TextNode(\n", + "                    id_=id_, text=text, embedding=corpus_embeddings[i].tolist()\n", + "                )\n", + "            )\n", + "        else:\n", + "            nodes.append(TextNode(id_=id_, text=text))\n", + "\n", + "    index = VectorStoreIndex(\n", + "        nodes=nodes,\n", + "        embeddings=corpus_embeddings,\n", + "        embed_model=embed_model,\n", + "        show_progress=True,\n", + "    )\n", + "    return index.as_retriever(similarity_top_k=k)\n", + "\n", + "\n", + "def ndcg_at_k(\n", + "    dataset: EmbeddingQAFinetuneDataset, retriever: BaseRetriever, k: int = 10\n", + "):\n", + "    queries = dataset.queries\n", + "    relevant_docs = dataset.relevant_docs\n", + "\n", + "    ndcg_scores = []\n", + "    for query_id, query in tqdm(queries.items()):\n", + "        retrieved_nodes = retriever.retrieve(query)\n", + "        retrieved_ids = [node.node.node_id for node in retrieved_nodes]\n", + "        expected_ids = relevant_docs[query_id]\n", + "\n", + "        # Calculate NDCG\n", + "        ideal_dcg = np.sum(\n", + "            [1 / np.log2(i + 2) for i in range(min(k, len(expected_ids)))]\n", + "        )\n", + "        rel_scores = np.zeros(k)\n", + "        for j in range(min(k, len(retrieved_ids))):\n", + "            if retrieved_ids[j] in expected_ids:\n", + "                rel_scores[j] = 1\n", + "        dcg = np.sum(\n", + "            [rel_scores[i] / np.log2(i + 2) for i in range(len(rel_scores))]\n", + "        )\n", + "        ndcg = dcg / ideal_dcg if ideal_dcg > 0 else 0\n", + "\n", + "        ndcg_scores.append(ndcg)\n", + "\n", + "    mean_ndcg = np.mean(ndcg_scores)\n", + "    return mean_ndcg" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Get the corpus embedding finetuning results\n", + "Next we use [NUDGE](https://www.arxiv.org/abs/2409.02343), the state-of-the-art method for finetuning corpus embeddings to maximize the accuracy of k-NN retrieval. We then take our new corpus embeddings along with the original embedding model to build a retriever. NUDGE only finetunes the corpus embeddings and does not change any of the parameters in the base embedding model.
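To make the non-parametric idea concrete before running it: the corpus embedding matrix itself becomes the set of trainable parameters, while the query embeddings and the model stay frozen. Below is a deliberately simplified, gradient-based sketch of that idea; it is not the actual NUDGE algorithm (which uses constrained, closed-form updates — see the paper), and all names in it are illustrative:

```python
# Toy illustration of non-parametric corpus-embedding finetuning:
# only `corpus_emb` is optimized; the embedding model is never touched.
# This is a simplified stand-in for NUDGE, not the algorithm itself.
import torch
import torch.nn.functional as F

query_emb = torch.randn(64, 384)                      # frozen train-query embeddings
corpus_emb = torch.randn(1000, 384).requires_grad_()  # trainable corpus embeddings
positive_idx = torch.randint(0, 1000, (64,))          # relevant doc per query

optimizer = torch.optim.Adam([corpus_emb], lr=1e-3)
for _ in range(100):
    optimizer.zero_grad()
    # push each query toward the embedding of its ground-truth document
    sims = F.cosine_similarity(query_emb, corpus_emb[positive_idx], dim=-1)
    loss = -sims.mean()
    loss.backward()
    optimizer.step()
```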
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO:llama_index.experimental.nudge.base:Use pytorch device: cuda\n", + "Use pytorch device: cuda\n" + ] + } + ], + "source": [ + "%%capture\n", + "from llama_index.experimental import Nudge\n", + "\n", + "k = 10\n", + "\n", + "nudge = Nudge(\n", + " train_dataset=train_dataset,\n", + " val_dataset=val_dataset,\n", + " embed_model=base_embed_model,\n", + " epochs=10000,\n", + " train_batch_size=len(train_dataset.queries),\n", + " val_batch_size=len(val_dataset.queries),\n", + ")\n", + "nudge.finetune()\n", + "nudge_corpus_embeddings = nudge.get_finetuned_corpus_embeddings()\n", + "nudge_retriever = build_retriever(\n", + " train_dataset, base_embed_model, nudge_corpus_embeddings, k=k\n", + ")\n", + "nudge_ndcg_test = ndcg_at_k(test_dataset, nudge_retriever, k)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Get the adapter finetuning results\n", + "We use a smaller batchsize than NUDGE above due to the adapter finetune baseline having a significantly slower training process. We also note that even with a batchsize the size of the dataset and 10k epochs the adapter finetuned model performs similarly to the hyperparams currently used." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO:llama_index.finetuning.embeddings.adapter:Use pytorch device: cuda\n", + "Use pytorch device: cuda\n", + "INFO:llama_index.embeddings.adapter.base:Use pytorch device: cuda\n", + "Use pytorch device: cuda\n" + ] + } + ], + "source": [ + "%%capture\n", + "from llama_index.finetuning import EmbeddingAdapterFinetuneEngine\n", + "\n", + "embedding_adapater_finetune_engine = EmbeddingAdapterFinetuneEngine(\n", + " train_dataset,\n", + " base_embed_model,\n", + " epochs=4,\n", + " batch_size=10,\n", + ")\n", + "embedding_adapater_finetune_engine.finetune()\n", + "embedding_adapter_model = (\n", + " embedding_adapater_finetune_engine.get_finetuned_model()\n", + ")\n", + "ft_retriever = build_retriever(train_dataset, embedding_adapter_model, k=k)\n", + "ft_ndcg_test = ndcg_at_k(test_dataset, ft_retriever, k)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Get the baseline results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%capture\n", + "\n", + "base_retriever = build_retriever(train_dataset, base_embed_model, k=k)\n", + "bge_ndcg_test = ndcg_at_k(test_dataset, base_retriever, k)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Display the results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "bge test - ndcg@10: 0.71\n", + "adaptor finetune test - ndcg@10: 0.72\n", + "NUDGE test - ndcg@10: 0.83\n" + ] + } + ], + "source": [ + "print(f\"bge test - ndcg@10: {bge_ndcg_test:.2f}\")\n", + "print(f\"adaptor finetune test - ndcg@10: {ft_ndcg_test:.2f}\")\n", + "print(f\"NUDGE test - ndcg@10: {nudge_ndcg_test:.2f}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + 
"mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/examples/finetuning/openai_fine_tuning_functions.ipynb b/docs/docs/examples/finetuning/openai_fine_tuning_functions.ipynb index 223e57722ba9f..3724f2cc6aea7 100644 --- a/docs/docs/examples/finetuning/openai_fine_tuning_functions.ipynb +++ b/docs/docs/examples/finetuning/openai_fine_tuning_functions.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/finetuning/rerankers/cohere_custom_reranker.ipynb b/docs/docs/examples/finetuning/rerankers/cohere_custom_reranker.ipynb index 012f56355ce15..aaf5c8426eaec 100644 --- a/docs/docs/examples/finetuning/rerankers/cohere_custom_reranker.ipynb +++ b/docs/docs/examples/finetuning/rerankers/cohere_custom_reranker.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/index_structs/doc_summary/DocSummary.ipynb b/docs/docs/examples/index_structs/doc_summary/DocSummary.ipynb index e9f861fc08c53..aae60cc2cbeb1 100644 --- a/docs/docs/examples/index_structs/doc_summary/DocSummary.ipynb +++ b/docs/docs/examples/index_structs/doc_summary/DocSummary.ipynb @@ -5,7 +5,7 @@ "id": "20de0e22", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/index_structs/struct_indices/SQLIndexDemo.ipynb b/docs/docs/examples/index_structs/struct_indices/SQLIndexDemo.ipynb index 4222d820953d8..833110e649f5a 100644 --- a/docs/docs/examples/index_structs/struct_indices/SQLIndexDemo.ipynb +++ b/docs/docs/examples/index_structs/struct_indices/SQLIndexDemo.ipynb @@ -5,7 +5,7 @@ "id": "1a558ee1", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/index_structs/struct_indices/duckdb_sql_query.ipynb b/docs/docs/examples/index_structs/struct_indices/duckdb_sql_query.ipynb index a2af0fe02d08d..02cb61d3b398b 100644 --- a/docs/docs/examples/index_structs/struct_indices/duckdb_sql_query.ipynb +++ b/docs/docs/examples/index_structs/struct_indices/duckdb_sql_query.ipynb @@ -6,7 +6,7 @@ "id": "e4d8172b", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/ingestion/advanced_ingestion_pipeline.ipynb b/docs/docs/examples/ingestion/advanced_ingestion_pipeline.ipynb index 48ff5533a2d39..38f10b712b6bf 100644 --- a/docs/docs/examples/ingestion/advanced_ingestion_pipeline.ipynb +++ b/docs/docs/examples/ingestion/advanced_ingestion_pipeline.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/ingestion/parallel_execution_ingestion_pipeline.ipynb b/docs/docs/examples/ingestion/parallel_execution_ingestion_pipeline.ipynb index e35be36a26b0b..ea4cd1d22617c 100644 --- a/docs/docs/examples/ingestion/parallel_execution_ingestion_pipeline.ipynb +++ b/docs/docs/examples/ingestion/parallel_execution_ingestion_pipeline.ipynb @@ -5,7 +5,7 @@ "id": "d1de0f1a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llama_dataset/downloading_llama_datasets.ipynb b/docs/docs/examples/llama_dataset/downloading_llama_datasets.ipynb index 9ee44266f6d90..1662be6bba6b4 100644 --- a/docs/docs/examples/llama_dataset/downloading_llama_datasets.ipynb +++ b/docs/docs/examples/llama_dataset/downloading_llama_datasets.ipynb @@ -5,7 +5,7 @@ "id": "28d433f8", 
"metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llama_dataset/labelled-rag-datasets.ipynb b/docs/docs/examples/llama_dataset/labelled-rag-datasets.ipynb index 2524f9197b221..2be07a76bc8c9 100644 --- a/docs/docs/examples/llama_dataset/labelled-rag-datasets.ipynb +++ b/docs/docs/examples/llama_dataset/labelled-rag-datasets.ipynb @@ -5,7 +5,7 @@ "id": "e8761049", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llama_dataset/ragdataset_submission_template.ipynb b/docs/docs/examples/llama_dataset/ragdataset_submission_template.ipynb index 8c11fe91b4b10..240f3154e6c4f 100644 --- a/docs/docs/examples/llama_dataset/ragdataset_submission_template.ipynb +++ b/docs/docs/examples/llama_dataset/ragdataset_submission_template.ipynb @@ -5,7 +5,7 @@ "id": "3cfd446f", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llama_dataset/uploading_llama_dataset.ipynb b/docs/docs/examples/llama_dataset/uploading_llama_dataset.ipynb index 1970033c698ab..b113732fc48b5 100644 --- a/docs/docs/examples/llama_dataset/uploading_llama_dataset.ipynb +++ b/docs/docs/examples/llama_dataset/uploading_llama_dataset.ipynb @@ -5,7 +5,7 @@ "id": "9c8368b2", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llama_hub/llama_pack_resume.ipynb b/docs/docs/examples/llama_hub/llama_pack_resume.ipynb index 3aaf8f92a978b..20858337c532f 100644 --- a/docs/docs/examples/llama_hub/llama_pack_resume.ipynb +++ b/docs/docs/examples/llama_hub/llama_pack_resume.ipynb @@ -7,7 +7,7 @@ "source": [ "# Llama Pack - Resume Screener 📄\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This example shows you how to use the Resume Screener Llama Pack.\n", "You can find all packs on https://llamahub.ai\n", diff --git a/docs/docs/examples/llama_hub/llama_packs_example.ipynb b/docs/docs/examples/llama_hub/llama_packs_example.ipynb index 4a50c13d6e700..d7ce1e14836ba 100644 --- a/docs/docs/examples/llama_hub/llama_packs_example.ipynb +++ b/docs/docs/examples/llama_hub/llama_packs_example.ipynb @@ -7,7 +7,7 @@ "source": [ "# Llama Packs Example\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This example shows you how to use a simple Llama Pack with VoyageAI. We show the following:\n", "- How to download a Llama Pack\n", diff --git a/docs/docs/examples/llm/ai21.ipynb b/docs/docs/examples/llm/ai21.ipynb index 579f604361a3d..c1b3640e8dec0 100644 --- a/docs/docs/examples/llm/ai21.ipynb +++ b/docs/docs/examples/llm/ai21.ipynb @@ -14,7 +14,8 @@ "source": [ "# AI21\n", "\n", - "This notebook shows how to use AI21's foundation models in LlamaIndex. The default model is `jamba-instruct`. If you want to use the older Jurassic models, specify the model name \"j2-mid\" or \"j2-ultra\"." + "This notebook shows how to use AI21's foundation models in LlamaIndex. The default model is `jamba-1.5-mini`.\n", + "Other supported models are `jamba-1.5-large` and `jamba-instruct`. If you want to use the older Jurassic models, specify the model name `j2-mid` or `j2-ultra`." 
] }, { @@ -222,7 +223,7 @@ "from llama_index.llms.ai21 import AI21\n", "\n", "llm = AI21(\n", - "    model=\"jamba-instruct\", api_key=api_key, max_tokens=100, temperature=0.5\n", + "    model=\"jamba-1.5-mini\", api_key=api_key, max_tokens=100, temperature=0.5\n", ")" ] }, @@ -275,7 +276,7 @@ "from llama_index.llms.ai21 import AI21\n", "from llama_index.core.llms import ChatMessage\n", "\n", - "llm = AI21(api_key=api_key, model=\"jamba-instruct\")\n", + "llm = AI21(api_key=api_key, model=\"jamba-1.5-mini\")\n", "messages = [\n", "    ChatMessage(\n", "        role=\"system\", content=\"You are a pirate with a colorful personality\"\n", @@ -339,7 +340,7 @@ "source": [ "from llama_index.llms.ai21 import AI21\n", "\n", - "llm = AI21(api_key=api_key, model=\"jamba-instruct\")\n", + "llm = AI21(api_key=api_key, model=\"jamba-1.5-mini\")\n", "\n", "tokenizer = llm.tokenizer\n", "\n", @@ -349,6 +350,64 @@ "\n", "print(decoded)" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Tool Calling" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "from llama_index.llms.ai21 import AI21\n", + "from llama_index.core.tools import FunctionTool\n", + "\n", + "\n", + "def multiply(a: int, b: int) -> int:\n", + "    \"\"\"Multiply two integers and returns the result integer\"\"\"\n", + "    return a * b\n", + "\n", + "\n", + "def subtract(a: int, b: int) -> int:\n", + "    \"\"\"Subtract two integers and returns the result integer\"\"\"\n", + "    return a - b\n", + "\n", + "\n", + "def divide(a: int, b: int) -> float:\n", + "    \"\"\"Divide two integers and returns the result float\"\"\"\n", + "    return a / b\n", + "\n", + "\n", + "def add(a: int, b: int) -> int:\n", + "    \"\"\"Add two integers and returns the result integer\"\"\"\n", + "    return a + b\n", + "\n", + "\n", + "multiply_tool = FunctionTool.from_defaults(fn=multiply)\n", + "add_tool = FunctionTool.from_defaults(fn=add)\n", + "subtract_tool = FunctionTool.from_defaults(fn=subtract)\n", + "divide_tool = FunctionTool.from_defaults(fn=divide)\n", + "\n", + "llm = AI21(model=\"jamba-1.5-mini\", api_key=api_key)\n", + "\n", + "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "    [multiply_tool, add_tool, subtract_tool, divide_tool],\n", + "    llm=llm,\n", + "    verbose=True,\n", + "    allow_parallel_tool_calls=True,\n", + ")\n", + "agent = agent_worker.as_agent()\n", + "\n", + "response = agent.chat(\n", + "    \"My friend Moses had 10 apples. He ate 5 apples in the morning. Then he found a box with 25 apples. He divided all his apples between his 5 friends.
How many apples did each friend get?\"\n", + ")" + ] + } ], "metadata": { diff --git a/docs/docs/examples/llm/alephalpha.ipynb b/docs/docs/examples/llm/alephalpha.ipynb index 3ff23939bf9bf..94dcccaac73e2 100644 --- a/docs/docs/examples/llm/alephalpha.ipynb +++ b/docs/docs/examples/llm/alephalpha.ipynb @@ -5,7 +5,7 @@ "id": "6453d3d5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/anthropic.ipynb b/docs/docs/examples/llm/anthropic.ipynb index d712efa64f9f9..8479462e70246 100644 --- a/docs/docs/examples/llm/anthropic.ipynb +++ b/docs/docs/examples/llm/anthropic.ipynb @@ -5,7 +5,7 @@ "id": "6453d3d5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/anyscale.ipynb b/docs/docs/examples/llm/anyscale.ipynb index 927ce04b038f2..a0b4977aaa974 100644 --- a/docs/docs/examples/llm/anyscale.ipynb +++ b/docs/docs/examples/llm/anyscale.ipynb @@ -5,7 +5,7 @@ "id": "7f7c3284", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/azure_inference.ipynb b/docs/docs/examples/llm/azure_inference.ipynb index b8884d5a726f7..b4f7280935288 100644 --- a/docs/docs/examples/llm/azure_inference.ipynb +++ b/docs/docs/examples/llm/azure_inference.ipynb @@ -5,7 +5,7 @@ "id": "6340e329", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -15,7 +15,7 @@ "source": [ "# Azure AI model inference\n", "\n", - "This notebook explains how to use `llama-index-llm-azure-inference` package with models deployed with the Azure AI model inference API in Azure AI studio or Azure Machine Learning." + "This notebook explains how to use the `llama-index-llm-azure-inference` package with models deployed with the Azure AI model inference API in Azure AI studio or Azure Machine Learning. The package also supports GitHub Models (Preview) endpoints." ] }, { @@ -68,7 +68,8 @@ "3. Deploy one model supporting the [Azure AI model inference API](https://aka.ms/azureai/modelinference). In this example we use a `Mistral-Large` deployment. \n", "\n", "    * You can follow the instructions at [Deploy models as serverless APIs](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models-serverless).\n", - "\n" + "\n", + "Alternatively, you can use GitHub Models endpoints with this integration, including the free tier experience. Read more about [GitHub models](https://github.com/marketplace/models)."
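For completeness, connecting this integration to GitHub Models would look roughly like the following sketch. The endpoint URL and the use of a `GITHUB_TOKEN` personal access token are assumptions based on the GitHub Models preview; check the GitHub Models documentation for the current values:

```python
import os

from llama_index.llms.azure_inference import AzureAICompletionsModel

# Assumed GitHub Models (Preview) endpoint and credential; the endpoint
# serves many models, so `model_name` selects which one to use.
llm = AzureAICompletionsModel(
    endpoint="https://models.inference.ai.azure.com",
    credential=os.environ["GITHUB_TOKEN"],
    model_name="Mistral-large",
)
```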
] }, { @@ -119,7 +120,7 @@ "id": "a593031b-c872-4360-8775-dff4844ccead", "metadata": {}, "source": [ - "## Use the model" + "## Connect to your deployment and endpoint" ] }, { @@ -186,7 +187,7 @@ }, { "cell_type": "markdown", - "id": "97fb9877", + "id": "ed641e58", "metadata": {}, "source": [ "If you are planning to use asynchronous calling, it's a best practice to use the asynchronous version for the credentials:" @@ -195,7 +196,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d7bc2c98", + "id": "8aa5e256", "metadata": {}, "outputs": [], "source": [ @@ -209,6 +210,36 @@ ")" ] }, + { + "cell_type": "markdown", + "id": "e3a6ad14", + "metadata": {}, + "source": [ + "If your endpoint is serving more than one model, like [GitHub Models](https://github.com/marketplace/models) or Azure AI Services, then you have to indicate the parameter `model_name`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f95b7416", + "metadata": {}, + "outputs": [], + "source": [ + "llm = AzureAICompletionsModel(\n", + " endpoint=os.environ[\"AZURE_INFERENCE_ENDPOINT\"],\n", + " credential=os.environ[\"AZURE_INFERENCE_CREDENTIAL\"],\n", + " model_name=\"mistral-large\", # change it to the model you want to use\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "f414500c", + "metadata": {}, + "source": [ + "## Use the model" + ] + }, { "cell_type": "markdown", "id": "579ce31c-7b51-471e-bcb5-47da90b3d555", diff --git a/docs/docs/examples/llm/azure_openai.ipynb b/docs/docs/examples/llm/azure_openai.ipynb index 5a6fe79c3428c..7a3d3121724fd 100644 --- a/docs/docs/examples/llm/azure_openai.ipynb +++ b/docs/docs/examples/llm/azure_openai.ipynb @@ -5,7 +5,7 @@ "id": "6340e329", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/cleanlab.ipynb b/docs/docs/examples/llm/cleanlab.ipynb index aa3b7230a2b84..21a7df46746d4 100644 --- a/docs/docs/examples/llm/cleanlab.ipynb +++ b/docs/docs/examples/llm/cleanlab.ipynb @@ -5,7 +5,7 @@ "id": "4d1b897a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/deepinfra.ipynb b/docs/docs/examples/llm/deepinfra.ipynb index 8ae2c1a58d139..08d543f82747a 100644 --- a/docs/docs/examples/llm/deepinfra.ipynb +++ b/docs/docs/examples/llm/deepinfra.ipynb @@ -7,7 +7,7 @@ "metadata": {}, "outputs": [], "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/fireworks.ipynb b/docs/docs/examples/llm/fireworks.ipynb index 2fd9883b88e93..e67751e68dcb0 100644 --- a/docs/docs/examples/llm/fireworks.ipynb +++ b/docs/docs/examples/llm/fireworks.ipynb @@ -5,7 +5,7 @@ "id": "9fd54a32", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/fireworks_cookbook.ipynb b/docs/docs/examples/llm/fireworks_cookbook.ipynb index efe892665028c..d048f7e2d6cca 100644 --- a/docs/docs/examples/llm/fireworks_cookbook.ipynb +++ b/docs/docs/examples/llm/fireworks_cookbook.ipynb @@ -5,7 +5,7 @@ "id": "9fd54a32", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/groq.ipynb b/docs/docs/examples/llm/groq.ipynb index 20e1bdbbd1643..ef24ae820933a 100644 --- a/docs/docs/examples/llm/groq.ipynb +++ b/docs/docs/examples/llm/groq.ipynb @@ -5,7 +5,7 @@ "id": "4d1b897a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/langchain.ipynb b/docs/docs/examples/llm/langchain.ipynb index 74a56ab3ee12f..3c66465765e94 100644 --- a/docs/docs/examples/llm/langchain.ipynb +++ 
b/docs/docs/examples/llm/langchain.ipynb @@ -5,7 +5,7 @@ "id": "f503d3ec", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/litellm.ipynb b/docs/docs/examples/llm/litellm.ipynb index 2829f7494a4fb..94f076b07b61f 100755 --- a/docs/docs/examples/llm/litellm.ipynb +++ b/docs/docs/examples/llm/litellm.ipynb @@ -5,7 +5,7 @@ "id": "30311f71", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/llama_2.ipynb b/docs/docs/examples/llm/llama_2.ipynb index 58faf193dd640..b73c3c90a97b9 100644 --- a/docs/docs/examples/llm/llama_2.ipynb +++ b/docs/docs/examples/llm/llama_2.ipynb @@ -5,7 +5,7 @@ "id": "bc2079ca", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/llama_2_llama_cpp.ipynb b/docs/docs/examples/llm/llama_2_llama_cpp.ipynb index d707d2bccb7d9..427f24e815db2 100644 --- a/docs/docs/examples/llm/llama_2_llama_cpp.ipynb +++ b/docs/docs/examples/llm/llama_2_llama_cpp.ipynb @@ -5,7 +5,7 @@ "id": "3ac9adb4", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/llama_2_rap_battle.ipynb b/docs/docs/examples/llm/llama_2_rap_battle.ipynb index fbb72030e2c23..e83994f7cffc3 100644 --- a/docs/docs/examples/llm/llama_2_rap_battle.ipynb +++ b/docs/docs/examples/llm/llama_2_rap_battle.ipynb @@ -5,7 +5,7 @@ "id": "c7f0f941", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/llama_api.ipynb b/docs/docs/examples/llm/llama_api.ipynb index f5358a8a77f19..b90e037dc7513 100644 --- a/docs/docs/examples/llm/llama_api.ipynb +++ b/docs/docs/examples/llm/llama_api.ipynb @@ -5,7 +5,7 @@ "id": "7184eab5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/llm_predictor.ipynb b/docs/docs/examples/llm/llm_predictor.ipynb index 5bee613f1454d..7bc3fa8167a58 100644 --- a/docs/docs/examples/llm/llm_predictor.ipynb +++ b/docs/docs/examples/llm/llm_predictor.ipynb @@ -5,7 +5,7 @@ "id": "f8612a38", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/mistralai.ipynb b/docs/docs/examples/llm/mistralai.ipynb index b5677de283c70..a4e91ece70881 100644 --- a/docs/docs/examples/llm/mistralai.ipynb +++ b/docs/docs/examples/llm/mistralai.ipynb @@ -5,7 +5,7 @@ "id": "6453d3d5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -65,7 +65,7 @@ "# otherwise it will lookup MISTRAL_API_KEY from your env variable\n", "# llm = MistralAI(api_key=\"\")\n", "\n", - "llm = MistralAI(api_key=\"d3BWjjKovVOfREc9AICivX0m9JMSzZc6\")\n", + "llm = MistralAI(api_key=\"\")\n", "\n", "resp = llm.complete(\"Paul Graham is \")" ] diff --git a/docs/docs/examples/llm/monsterapi.ipynb b/docs/docs/examples/llm/monsterapi.ipynb index 8454e320c52c9..7ff9dea71495a 100644 --- a/docs/docs/examples/llm/monsterapi.ipynb +++ b/docs/docs/examples/llm/monsterapi.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/neutrino.ipynb b/docs/docs/examples/llm/neutrino.ipynb index c1a6e8fe9147f..412b9a18f13bb 100644 --- a/docs/docs/examples/llm/neutrino.ipynb +++ b/docs/docs/examples/llm/neutrino.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/nvidia.ipynb b/docs/docs/examples/llm/nvidia.ipynb index fedb3dc9d6315..ec60f15273ed4 100644 --- a/docs/docs/examples/llm/nvidia.ipynb +++ 
b/docs/docs/examples/llm/nvidia.ipynb @@ -585,6 +585,78 @@ "source": [ "streaming_response.print_response_stream()" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tool calling\n", + "Starting in v0.2.1, NVIDIA supports tool calling.\n", + "\n", + "NVIDIA provides integration with a variety of models on build.nvidia.com as well as local NIMs. Not all of these models are trained for tool calling, so be sure to select a model that supports tool calling for your experimentation and applications.\n", + "\n", + "You can get a list of models that are known to support tool calling with:\n", + "\n", + "`NOTE:` For more examples, refer to [nvidia_agent.ipynb](../agent/nvidia_agent.ipynb)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "tool_models = [\n", + "    model\n", + "    for model in NVIDIA().available_models\n", + "    if model.is_function_calling_model\n", + "]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With a tool-capable model:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.tools import FunctionTool\n", + "\n", + "\n", + "def multiply(a: int, b: int) -> int:\n", + "    \"\"\"Multiply two integers and returns the result integer\"\"\"\n", + "    return a * b\n", + "\n", + "\n", + "multiply_tool = FunctionTool.from_defaults(fn=multiply)\n", + "\n", + "\n", + "def add(a: int, b: int) -> int:\n", + "    \"\"\"Add two integers and returns the result integer\"\"\"\n", + "    return a + b\n", + "\n", + "\n", + "add_tool = FunctionTool.from_defaults(fn=add)\n", + "\n", + "llm = NVIDIA(\"meta/llama-3.1-70b-instruct\")\n", + "from llama_index.core.agent import FunctionCallingAgentWorker\n", + "\n", + "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "    [multiply_tool, add_tool],\n", + "    llm=llm,\n", + "    verbose=True,\n", + ")\n", + "agent = agent_worker.as_agent()\n", + "\n", + "response = agent.chat(\"What is (121 * 3) + 42?\")\n", + "print(str(response))" + ] + } ], "metadata": { diff --git a/docs/docs/examples/llm/nvidia_tensorrt.ipynb b/docs/docs/examples/llm/nvidia_tensorrt.ipynb index 32e030f76f147..abc5caa78574f 100644 --- a/docs/docs/examples/llm/nvidia_tensorrt.ipynb +++ b/docs/docs/examples/llm/nvidia_tensorrt.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/nvidia_text_completion.ipynb b/docs/docs/examples/llm/nvidia_text_completion.ipynb new file mode 100644 index 0000000000000..34244c8ce58d3 --- /dev/null +++ b/docs/docs/examples/llm/nvidia_text_completion.ipynb @@ -0,0 +1,258 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# NVIDIA's LLM Text Completion API\n", + "\n", + "Extending the NVIDIA class to support the /completions API for the models below:\n", + "\n", + "- bigcode/starcoder2-7b\n", + "- bigcode/starcoder2-15b" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install --force-reinstall llama_index-llms-nvidia" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n", + "\n", + "**To get started:**\n", + "\n", + "1. Create a free account with [NVIDIA](https://build.nvidia.com/), which hosts NVIDIA AI Foundation models.\n", + "\n", + "2. 
Click on your model of choice.\n", + "\n", + "3. Under Input, select the Python tab, and click `Get API Key`. Then click `Generate Key`.\n", + "\n", + "4. Copy and save the generated key as NVIDIA_API_KEY. From there, you should have access to the endpoints." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!which python" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "# del os.environ['NVIDIA_API_KEY'] ## delete key and reset\n", + "if os.environ.get(\"NVIDIA_API_KEY\", \"\").startswith(\"nvapi-\"):\n", + "    print(\"Valid NVIDIA_API_KEY already in environment. Delete to reset\")\n", + "else:\n", + "    nvapi_key = getpass.getpass(\"NVAPI Key (starts with nvapi-): \")\n", + "    assert nvapi_key.startswith(\n", + "        \"nvapi-\"\n", + "    ), f\"{nvapi_key[:5]}... is not a valid key\"\n", + "    os.environ[\"NVIDIA_API_KEY\"] = nvapi_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os.environ[\"NVIDIA_API_KEY\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# running the async code in a notebook requires the use of nest_asyncio\n", + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Working with NVIDIA API Catalog" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.nvidia import NVIDIA\n", + "\n", + "llm = NVIDIA(model=\"bigcode/starcoder2-15b\", use_chat_completions=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "llm.available_models" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Working with NVIDIA NIMs\n", + "\n", + "In addition to connecting to hosted [NVIDIA NIMs](https://ai.nvidia.com), this connector can be used to connect to local microservice instances. This helps you take your applications local when necessary.\n", + "\n", + "For instructions on how to set up local microservice instances, see https://developer.nvidia.com/blog/nvidia-nim-offers-optimized-inference-microservices-for-deploying-ai-models-at-scale/" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.nvidia import NVIDIA\n", + "\n", + "# connect to a chat NIM running at localhost:8080, specifying a specific model\n", + "llm = NVIDIA(base_url=\"http://localhost:8080/v1\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Complete: `.complete()`\n", + "\n", + "We can use `.complete()`/`.acomplete()` (which takes a string) to prompt a response from the selected model.\n", + "\n", + "Let's use our default model for this task." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(llm.complete(\"# Function that does quicksort:\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As expected by LlamaIndex, we get a `CompletionResponse` in response."
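A minimal sketch of what to do with that object — `CompletionResponse` carries the generated string in its `text` attribute (and the raw provider payload in `raw`), assuming the `llm` from the cells above:

```python
# Access the generated text on the CompletionResponse returned by complete().
response = llm.complete("# Function that does quicksort:")
print(response.text)  # the generated completion as a plain string
```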
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Async Complete: `.acomplete()`\n", + "\n", + "There is also an async implementation which can be leveraged in the same way!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await llm.acomplete(\"# Function that does quicksort:\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Streaming" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "x = llm.stream_complete(prompt=\"# Reverse string in python:\", max_tokens=512)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Async Streaming" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "x = await llm.astream_complete(\n", + " prompt=\"# Reverse program in python:\", max_tokens=512\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async for t in x:\n", + " print(t.delta, end=\"\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/examples/llm/nvidia_triton.ipynb b/docs/docs/examples/llm/nvidia_triton.ipynb index 66be41c519343..c516158badb49 100644 --- a/docs/docs/examples/llm/nvidia_triton.ipynb +++ b/docs/docs/examples/llm/nvidia_triton.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/oci_genai.ipynb b/docs/docs/examples/llm/oci_genai.ipynb index f9a0584f15672..3caeb6ca6d1d0 100644 --- a/docs/docs/examples/llm/oci_genai.ipynb +++ b/docs/docs/examples/llm/oci_genai.ipynb @@ -6,7 +6,7 @@ "id": "6d1ca9ac", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/ollama.ipynb b/docs/docs/examples/llm/ollama.ipynb index fef6be0fba5d2..cc53efe836354 100644 --- a/docs/docs/examples/llm/ollama.ipynb +++ b/docs/docs/examples/llm/ollama.ipynb @@ -5,7 +5,7 @@ "id": "4d1b897a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/ollama_gemma.ipynb b/docs/docs/examples/llm/ollama_gemma.ipynb index 3960aadfa4918..a04a6f980cffe 100644 --- a/docs/docs/examples/llm/ollama_gemma.ipynb +++ b/docs/docs/examples/llm/ollama_gemma.ipynb @@ -5,7 +5,7 @@ "id": "4d1b897a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/openai.ipynb b/docs/docs/examples/llm/openai.ipynb index 85f37fdcea90d..5d681cd9e8af7 100644 --- a/docs/docs/examples/llm/openai.ipynb +++ b/docs/docs/examples/llm/openai.ipynb @@ -5,7 +5,7 @@ "id": "9fd54a32", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -455,7 +455,7 @@ "source": [ "from llama_index.llms.openai import OpenAI\n", "from llama_index.core.prompts import PromptTemplate\n", - "from pydantic.v1 import BaseModel\n", + "from pydantic import BaseModel\n", "from typing import List\n", "\n", "\n", diff --git a/docs/docs/examples/llm/openrouter.ipynb b/docs/docs/examples/llm/openrouter.ipynb index 7bbdb926a884d..cc96e8cf06a30 100644 
--- a/docs/docs/examples/llm/openrouter.ipynb +++ b/docs/docs/examples/llm/openrouter.ipynb @@ -5,7 +5,7 @@ "id": "7f7c3284", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/paieas.ipynb b/docs/docs/examples/llm/paieas.ipynb index 4060000d4f8cc..e568f4672fbb2 100644 --- a/docs/docs/examples/llm/paieas.ipynb +++ b/docs/docs/examples/llm/paieas.ipynb @@ -5,7 +5,7 @@ "id": "9fd54a32", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/palm.ipynb b/docs/docs/examples/llm/palm.ipynb index 14917868d79cf..f1942a781d7e1 100644 --- a/docs/docs/examples/llm/palm.ipynb +++ b/docs/docs/examples/llm/palm.ipynb @@ -5,7 +5,7 @@ "id": "4d4991c2", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/portkey.ipynb b/docs/docs/examples/llm/portkey.ipynb index 5f67556d7785f..70c8b1de670a6 100644 --- a/docs/docs/examples/llm/portkey.ipynb +++ b/docs/docs/examples/llm/portkey.ipynb @@ -44,7 +44,7 @@ "\n", "To harness these features, let's start with the setup:\n", "\n", - "\n", + "\n", " \\\"Open\n", "" ] @@ -359,7 +359,7 @@ "- [LLMOptions Constructor](https://github.com/Portkey-AI/rubeus-python-sdk/blob/4cf3e17b847225123e92f8e8467b41d082186d60/rubeus/api_resources/utils.py#L179)\n", "- [List of Portkey + Llamaindex Features](#portkeys-integration-with-llamaindex-adds-the-following-production-capabilities-to-your-apps-out-of-the-box)\n", "\n", - "\n", + "\n", " \\\"Open\n", "\n", "\n" diff --git a/docs/docs/examples/llm/predibase.ipynb b/docs/docs/examples/llm/predibase.ipynb index f196e1b627ebe..c20e9db68220f 100644 --- a/docs/docs/examples/llm/predibase.ipynb +++ b/docs/docs/examples/llm/predibase.ipynb @@ -5,7 +5,7 @@ "id": "4ec7cd6e", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/rungpt.ipynb b/docs/docs/examples/llm/rungpt.ipynb index cd91b6f2252e8..01af7738d81dc 100644 --- a/docs/docs/examples/llm/rungpt.ipynb +++ b/docs/docs/examples/llm/rungpt.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/together.ipynb b/docs/docs/examples/llm/together.ipynb index ba7b1116a2203..1962a57dbe3db 100644 --- a/docs/docs/examples/llm/together.ipynb +++ b/docs/docs/examples/llm/together.ipynb @@ -5,7 +5,7 @@ "id": "4d1b897a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/upstage.ipynb b/docs/docs/examples/llm/upstage.ipynb index 3e4021c976950..a9d4f99f4107b 100644 --- a/docs/docs/examples/llm/upstage.ipynb +++ b/docs/docs/examples/llm/upstage.ipynb @@ -5,7 +5,7 @@ "id": "9fd54a32", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/vicuna.ipynb b/docs/docs/examples/llm/vicuna.ipynb index 9e96df22bcfab..7cf6419cba523 100644 --- a/docs/docs/examples/llm/vicuna.ipynb +++ b/docs/docs/examples/llm/vicuna.ipynb @@ -5,7 +5,7 @@ "id": "c798b3ad", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/llm/xinference_local_deployment.ipynb b/docs/docs/examples/llm/xinference_local_deployment.ipynb index e0790f6d207a5..745974be963b9 100644 --- a/docs/docs/examples/llm/xinference_local_deployment.ipynb +++ b/docs/docs/examples/llm/xinference_local_deployment.ipynb @@ -6,7 +6,7 @@ "id": "c6218e91", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/low_level/ingestion.ipynb 
b/docs/docs/examples/low_level/ingestion.ipynb index 8f916b8e93161..bbcad29f39ccc 100644 --- a/docs/docs/examples/low_level/ingestion.ipynb +++ b/docs/docs/examples/low_level/ingestion.ipynb @@ -5,7 +5,7 @@ "id": "57c676db", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/low_level/response_synthesis.ipynb b/docs/docs/examples/low_level/response_synthesis.ipynb index 566bf441ffd73..7ca91161f7b31 100644 --- a/docs/docs/examples/low_level/response_synthesis.ipynb +++ b/docs/docs/examples/low_level/response_synthesis.ipynb @@ -5,7 +5,7 @@ "id": "e5df0f51", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/low_level/retrieval.ipynb b/docs/docs/examples/low_level/retrieval.ipynb index f26e84fc4dbd1..faed60438f706 100644 --- a/docs/docs/examples/low_level/retrieval.ipynb +++ b/docs/docs/examples/low_level/retrieval.ipynb @@ -5,7 +5,7 @@ "id": "62ac6c38", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/managed/PostgresMLDemo.ipynb b/docs/docs/examples/managed/PostgresMLDemo.ipynb index 0b20023189feb..ebdaf78d9d792 100644 --- a/docs/docs/examples/managed/PostgresMLDemo.ipynb +++ b/docs/docs/examples/managed/PostgresMLDemo.ipynb @@ -5,7 +5,7 @@ "id": "adf7d63d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/managed/vectaraDemo.ipynb b/docs/docs/examples/managed/vectaraDemo.ipynb index 611c153a77e31..e760573e8ee38 100644 --- a/docs/docs/examples/managed/vectaraDemo.ipynb +++ b/docs/docs/examples/managed/vectaraDemo.ipynb @@ -5,7 +5,7 @@ "id": "adf7d63d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -19,8 +19,8 @@ "Vectara provides an end-to-end managed service for Retrieval Augmented Generation or RAG, which includes:\n", "1. A way to extract text from document files and chunk them into sentences.\n", "2. The state-of-the-art [Boomerang](https://vectara.com/how-boomerang-takes-retrieval-augmented-generation-to-the-next-level-via-grounded-generation/) embeddings model. Each text chunk is encoded into a vector embedding using Boomerang, and stored in the Vectara internal vector store. Thus, when using Vectara with LlamaIndex you do not need to call a separate embedding model - this happens automatically within the Vectara backend.\n", - "3. A query service that automatically encodes the query into embedding, and retrieves the most relevant text segments (including support for [Hybrid Search](https://docs.vectara.com/docs/api-reference/search-apis/lexical-matching) and [MMR](https://vectara.com/get-diverse-results-and-comprehensive-summaries-with-vectaras-mmr-reranker/))\n", - "4. An option to a create [generative summary](https://docs.vectara.com/docs/learn/grounded-generation/grounded-generation-overview), based on the retrieved documents, including citations.\n", + "3. A query service that automatically encodes the query into embedding, and retrieves the most relevant text segments (including support for [Hybrid Search](https://docs.vectara.com/docs/api-reference/search-apis/lexical-matching) and [MMR](https://vectara.com/get-diverse-results-and-comprehensive-summaries-with-vectaras-mmr-reranker/), [Multilingual](https://vectara.com/blog/unlocking-the-state-of-the-art-reranker-introducing-the-vectara-multilingual-reranker_v1/), or [User Defined Function](https://vectara.com/blog/introducing-user-defined-functions-for-vectara/) reranking)\n", + "4. 
An option to create a [generative summary](https://docs.vectara.com/docs/learn/grounded-generation/grounded-generation-overview) with a selection of different LLMs (including [Mockingbird](https://vectara.com/blog/mockingbird-is-a-rag-specific-llm-that-beats-gpt-4-gemini-1-5-pro-in-rag-output-quality/)), based on the retrieved documents, including citations.\n", "\n", "See the [Vectara API documentation](https://docs.vectara.com/docs/) for more information on how to use the API." ] @@ -86,7 +86,18 @@ "execution_count": null, "id": "c154dd4b", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'ai-bill-of-rights.pdf'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "from llama_index.indices.managed.vectara import VectaraIndex\n", "import requests\n", @@ -137,7 +148,7 @@ { "data": { "text/plain": [ - "\"The risks associated with AI include potential biases leading to discriminatory outcomes, lack of transparency in decision-making processes, and challenges in establishing public trust and understanding of algorithmic systems [1]. Safety and efficacy concerns arise in the context of complex technologies like AI, necessitating strong regulations and proactive risk mitigation strategies [2]. The process of identifying and addressing risks before and during the deployment of automated systems is crucial to prevent harm to individuals' rights, opportunities, and access [5]. Furthermore, the impact of AI risks can be most visible at the community level, emphasizing the importance of considering and mitigating harms to various communities [6]. Efforts are being made to translate principles into practice through laws, policies, and technical approaches to ensure AI systems are lawful, respectful, accurate, safe, understandable, responsible, and accountable [7].\"" + "'The risks of AI include biased data and discriminatory outcomes, opaque decision-making processes, and lack of public trust and understanding of algorithmic systems [1]. These risks can lead to harm to individuals and communities, including image-based abuse, incorrect evaluations, and violations of safety [7]. To mitigate these risks, it is essential to have ongoing transparency, value-sensitive and participatory design, explanations designed for relevant stakeholders, and public consultation [1]. Strong safety regulations and measures to address harms when they occur can enhance innovation in the context of complex technologies [2]. Additionally, industry is providing innovative solutions to mitigate risks to the safety and efficacy of AI systems, including risk assessments, auditing mechanisms, and documentation procedures [2].'" ] }, "execution_count": null, @@ -146,7 +157,9 @@ } ], "source": [ - "qe = index.as_query_engine(summary_enabled=True)\n", + "qe = index.as_query_engine(\n", + " summary_enabled=True, summary_prompt_name=\"mockingbird-1.0-2024-07-16\"\n", + ")\n", "qe.query(questions[0]).response" ] }, @@ -168,12 +181,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "The risks of AI include biased data leading to discriminatory outcomes, opaque decision-making processes, and lack of public trust and understanding in algorithmic systems [1]. Organizations are implementing innovative solutions like risk assessments, auditing mechanisms, and ongoing monitoring to mitigate safety and efficacy risks of AI systems [2]. 
Stakeholder engagement and a risk management framework by institutions like NIST aim to address risks to individuals, organizations, and society posed by AI technology [3]. Risk identification, mitigation, and focusing on safety and effectiveness of AI systems are crucial before and during deployment to protect people’s rights, opportunities, and access [5]. The concept of communities is integral in understanding the impact of AI and automated systems, as the potential harm may be most visible at the community level [6]. Practical implementation of principles such as lawful, purposeful, accurate, safe, and accountable AI is essential to address risks, with federal agencies adhering to guidelines promoting trustworthy AI [7]." + "The risks of AI include biased data and discriminatory outcomes, opaque decision-making processes, and lack of public trust and understanding of algorithmic systems [1]. These risks can lead to harm to individuals and communities, including image-based abuse, incorrect evaluations, and violations of safety [7]. To mitigate these risks, it is essential to have ongoing transparency, value-sensitive and participatory design, explanations designed for relevant stakeholders, and public consultation [1]. Strong safety regulations and measures to address harms when they occur can enhance innovation in the context of complex technologies [2]. Additionally, industry is providing innovative solutions to mitigate risks to the safety and efficacy of AI systems, including risk assessments, auditing mechanisms, and documentation procedures [2]." ] } ], "source": [ - "qe = index.as_query_engine(summary_enabled=True, streaming=True)\n", + "qe = index.as_query_engine(\n", + " summary_enabled=True,\n", + " summary_prompt_name=\"mockingbird-1.0-2024-07-16\",\n", + " streaming=True,\n", + ")\n", "response = qe.query(questions[0])\n", "\n", "for chunk in response.response_gen:\n", @@ -214,15 +231,15 @@ "text": [ "Question: What are the risks of AI?\n", "\n", - "Response: The risks of AI involve potential biases, opaque decision-making processes, and lack of public trust due to discriminatory outcomes and biased data [1]. To mitigate these risks, industry is implementing innovative solutions like risk assessments and monitoring mechanisms [2]. Stakeholder engagement and the development of a risk management framework by organizations like the National Institute of Standards and Technology aim to manage risks posed by AI to individuals, organizations, and society [3]. Identification and mitigation of potential risks, impact assessments, and balancing high impact risks with appropriate mitigation are crucial before and during the deployment of AI systems [5]. The Blueprint for an AI Bill of Rights emphasizes the protection of individuals from unsafe or ineffective AI systems [7].\n", + "Response: The risks of AI include potential biased data leading to discriminatory outcomes, opaque decision-making processes, lack of public trust, and understanding of algorithmic systems. Risks also involve safety concerns, such as AI systems violating the safety of individuals, and the proliferation of harmful technologies like AI-enabled \"nudification\" tools. Furthermore, the incorrect penalization by AI systems, as seen in the case of AI-powered cameras in delivery vans, can lead to adverse consequences for individuals. To mitigate these risks, ongoing transparency, participatory design, explanations for stakeholders, and public consultation are essential. 
Organizations are implementing innovative solutions like risk assessments, auditing mechanisms, and monitoring tools to address safety and efficacy concerns related to AI systems. The involvement of communities and stakeholders in the design and evaluation of AI systems is crucial to ensure trustworthiness and mitigate potential harms.\n", "\n", "Question: What should we do to prevent bad actors from using AI?\n", "\n", - "Response: To prevent the misuse of AI by malicious entities, several key measures can be implemented. Firstly, it is crucial to ensure that automated systems are designed with safety and effectiveness in mind, following principles such as being lawful, purposeful, accurate, secure, and transparent [2]. Entities should proactively identify and manage risks associated with sensitive data, conducting regular audits and limiting access to prevent misuse [3], [4], [5]. Additionally, ongoing monitoring of automated systems is essential to detect and address algorithmic discrimination and unforeseen interactions that could lead to misuse [6], [7]. By incorporating these practices into the design, development, and deployment of AI technologies, the potential for misuse by malicious entities can be significantly reduced.\n", + "Response: To stop malicious individuals from exploiting artificial intelligence, measures can be taken such as implementing ethical principles for the use of AI, ensuring transparency, accountability, and regular monitoring of AI systems, conducting equity assessments, using representative data, protecting against biases, and providing clear organizational oversight. Additionally, it is crucial to address algorithmic discrimination through proactive measures, ongoing disparity testing, and independent evaluations to protect individuals and communities from unjust treatment based on various characteristics. Furthermore, incorporating safeguards like plain language reporting, algorithmic impact assessments, and public disclosure of mitigation efforts can help prevent the misuse of AI by malicious actors [1][2][4].\n", "\n", "Question: What are the benefits?\n", "\n", - "Response: Artificial Intelligence (AI) offers various advantages, such as promoting the use of trustworthy AI systems with principles focusing on legality, performance, safety, transparency, and accountability [1]. Organizations are incorporating protections and ethical principles in AI development, aligning with global recommendations for responsible AI stewardship [2]. Furthermore, research is ongoing to enhance explainable AI systems for better human understanding and trust in AI outcomes [5]. The U.S. government is establishing councils and frameworks to advance AI technologies, ensuring responsible AI implementation across sectors [4], . AI can streamline processes, improve decision-making, and enhance efficiency, although challenges like bias, flaws, and accessibility issues need to be addressed to maximize its benefits [5].\n", + "Response: The advantages of using artificial intelligence include providing systems that are lawful, purposeful, accurate, safe, secure, and transparent. AI can be accountable, reliable, and effective, leading to trustworthy automated systems. 
Additionally, AI can enhance efficiency, decision-making processes, and innovation while supporting democratic values and ethical principles [1][2][7].\n", "\n" ] } ], @@ -262,12 +279,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "The search results indicate a focus on the relationship between humans and robots, emphasizing the need for co-intelligence and the best use of automated systems [2]. The discussions revolve around ensuring that automated systems are designed, tested, and protected to prevent potential harmful outcomes [1]. While there are concerns about the use of surveillance technology by companies like Amazon and Walmart, the emphasis is on balancing equities and maintaining oversight in law enforcement activities [5]. The search results do not directly answer whether robots will kill us all, but they highlight the importance of proactive protections, context-specific guidance, and existing policies to govern the use of automated systems in various settings [6]." + "Artificial intelligence will not rule the government. The government is implementing principles and guidelines to ensure the ethical and responsible use of AI in various sectors, including the federal government. These measures focus on transparency, accountability, safety, and adherence to national values, ensuring that AI is used in a manner that upholds civil rights, democratic values, and national security [1] [2] [3]." ] } ], "source": [ - "response = ce.stream_chat(\"Will robots kill us all?\")\n", + "response = ce.stream_chat(\"Will artificial intelligence rule the government?\")\n", "for chunk in response.chat_stream:\n", " print(chunk.delta or \"\", end=\"\", flush=True)" ] }, @@ -279,7 +296,9 @@ "source": [ "### Agentic RAG\n", "\n", - "Let's create a ReAct Agent using LlamaIndex that utilizes Vectara as its RAG tool.\n", + "Vectara also has its own package, [vectara-agentic](https://github.com/vectara/py-vectara-agentic), built on top of many features from LlamaIndex to easily implement agentic RAG applications. It allows you to create your own AI assistant with RAG query tools and other custom tools, such as making API calls to retrieve information from financial websites. You can find the full documentation for vectara-agentic [here](https://vectara.github.io/vectara-agentic-docs/).\n", + "\n", + "Let's create a ReAct Agent with a single RAG tool using vectara-agentic.\n", "For this, you will need another LLM to drive the agent's reasoning; we use OpenAI's GPT-4o here as an example.\n", "(For this to work, please make sure you have `OPENAI_API_KEY` defined in your environment)." 
] @@ -287,129 +306,109 @@ { "cell_type": "code", "execution_count": null, - "id": "c6766a84-b228-4b80-a32b-0c167b843819", + "id": "69f0d504-bc72-4dfc-8cdf-83b8aa69206c", "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.agent import ReActAgent\n", - "from llama_index.llms.openai import OpenAI\n", - "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", - "\n", - "llm = OpenAI(model=\"gpt-4o\", temperature=0)\n", - "vectara_tool = QueryEngineTool(\n", - " query_engine=index.as_query_engine(\n", - " summary_enabled=True,\n", - " summary_num_results=5,\n", - " summary_response_lang=\"en\",\n", - " summary_prompt_name=\"vectara-summary-ext-24-05-large\",\n", - " reranker=\"mmr\",\n", - " rerank_k=50,\n", - " mmr_diversity_bias=0.2,\n", - " ),\n", - " metadata=ToolMetadata(\n", - " name=\"Vectara\",\n", - " description=\"Vectara Query Engine that is able to answer Questions about AI regulation.\",\n", - " ),\n", - ")\n", - "agent = ReActAgent.from_tools(\n", - " tools=[vectara_tool],\n", - " llm=llm,\n", - " context=\"\"\"\n", - " You are a helpful chatbot that answers any user questions around AI regulations using the Vectara tool.\n", - " You break down complex questions into simpler ones.\n", - " You use the Vectara query engine to help provide answers to simpler questions.\n", - " \"\"\",\n", - " verbose=True,\n", - ")" + "!pip install vectara-agentic" + ] + }, + { + "cell_type": "markdown", + "id": "7b8425b7-6235-408b-a02e-ff332916b4d6", + "metadata": {}, + "source": [ + "UNCOMMENT NAME ARGUMENT WHEN NEW VERSION OF VECTARA AGENTIC IS RELEASED. MAKE SURE THE AGENT DOES NOT USE \"vectara_11\" as the first Action" ] }, { "cell_type": "code", "execution_count": null, - "id": "8a68dbca-613a-4f44-aa9e-67a758f76683", + "id": "ee674bf5-fccb-42cb-8499-77aad14fc538", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "> Running step e6ab056d-843c-489d-91ad-fc0245168f7e. Step input: What are the risks of AI? What are the benefits? Compare and contrast and provide a summary with arguments for and against from experts.\n", "\u001b[1;3;38;5;200mThought: The current language of the user is: English. I need to use a tool to help me answer the question.\n", - "Action: Vectara\n", - "Action Input: {'input': 'What are the risks of AI?'}\n", - "\u001b[0m\u001b[1;3;34mObservation: The risks of AI include biased data leading to discriminatory outcomes, opaque decision-making processes, and a lack of public trust and understanding in algorithmic systems. Mitigation strategies discussed involve ongoing transparency, participatory design, and engaging with impacted communities to understand potential harms and integrate protections into the design of AI systems [1]. Additionally, there's a focus on identifying and mitigating risks before deployment, particularly those impacting people's rights, opportunities, or safety, with a strong emphasis on avoiding systems that inherently violate safety norms [5].\n", - "\u001b[0m\u001b[1;3;38;5;200mThought: I have obtained information about the risks of AI. Now, I need to gather information about the benefits of AI to provide a comprehensive comparison.\n", - "Action: Vectara\n", - "Action Input: {'input': 'What are the benefits of AI?'}\n", - "\u001b[0m\u001b[1;3;34mObservation: The benefits of AI include its transformative potential to improve people's lives by building better and more innovative infrastructure. 
It also offers the possibility to enhance community health, safety, and welfare by ensuring better representation of all voices, particularly those traditionally marginalized by technological advances [1]. AI can also prevent harms and improve opportunities, rights, and access for Americans, playing a central role in shaping important policies like the Blueprint for an AI Bill of Rights [2].\n", - "\u001b[0m\u001b[1;3;38;5;200mThought: I have gathered information about both the risks and benefits of AI. Now, I need to compare and contrast these points and provide a summary with arguments for and against from experts.\n", - "Answer: ### Comparison of Risks and Benefits of AI\n", - "\n", - "#### Risks of AI:\n", - "1. **Biased Data and Discriminatory Outcomes**: AI systems can perpetuate and even exacerbate biases present in the data they are trained on, leading to unfair and discriminatory outcomes.\n", - "2. **Opaque Decision-Making**: The decision-making processes of AI systems can be complex and not easily understandable, leading to a lack of transparency.\n", - "3. **Lack of Public Trust**: The opacity and potential biases in AI systems can result in a lack of trust and understanding from the public.\n", - "4. **Safety and Rights Violations**: There is a risk of AI systems violating safety norms and impacting people's rights, opportunities, or safety.\n", - "\n", - "#### Benefits of AI:\n", - "1. **Improved Infrastructure**: AI has the potential to transform and improve infrastructure, making it more innovative and efficient.\n", - "2. **Enhanced Community Health and Safety**: AI can play a significant role in improving community health, safety, and welfare by ensuring better representation and inclusivity.\n", - "3. **Prevention of Harms**: AI can help prevent harms and improve opportunities, rights, and access, particularly for marginalized communities.\n", - "4. **Policy Shaping**: AI is central to shaping important policies, such as the Blueprint for an AI Bill of Rights, which aims to protect and enhance the rights of individuals.\n", + "Action: query_ai\n", + "Action Input: {'query': 'What are the risks and benefits of AI? 
Provide a summary with arguments for and against from experts.'}\n", + "\u001b[0m\u001b[1;3;34mObservation: {'response': \"Here is a summary of the risks and benefits of AI based on the provided sources:\\n\\n**Benefits of AI:**\\n\\n* AI has transformative potential to improve Americans' lives [1]\\n* AI-enabled systems can build better and more innovative infrastructure [3]\\n* AI can be integrated into communities in a thoughtful and responsible way, benefiting from lessons learned from urban planning [3]\\n\\n**Risks of AI:**\\n\\n* AI can lead to biased data and discriminatory outcomes [2]\\n* Opaque decision-making processes can lack public trust and understanding [2]\\n* AI can pose risks to individuals, organizations, and society, highlighting the need for risk management frameworks [5]\\n\\n**Addressing Risks and Benefits:**\\n\\n* The White House Office of Science and Technology Policy has led a year-long process to seek input from experts and stakeholders on the issue of algorithmic and data-driven harms [1]\\n* The Blueprint for an AI Bill of Rights emphasizes the importance of safe and effective systems, ongoing transparency, and value-sensitive and participatory design [1]\\n* Industry is providing innovative solutions to mitigate risks to the safety and efficacy of AI systems, including risk assessments, auditing mechanisms, and stakeholder engagement [5]\\n* The National Institute of Standards and Technology (NIST) is developing a risk management framework to better manage risks posed to individuals, organizations, and society by AI [5]\\n\\nOverall, the sources suggest that AI has the potential to bring significant benefits, but also poses risks that need to be addressed through thoughtful and responsible development and integration into communities.\", 'citation_metadata': {'metadata for citation 1': {'page': '1', 'lang': 'eng', 'section': '1', 'offset': '10959', 'len': '208', 'CreationDate': '1663695035', 'Producer': 'iLovePDF', 'Title': 'Blueprint for an AI Bill of Rights', 'Creator': 'Adobe Illustrator 26.3 (Macintosh)', 'ModDate': '1664808078', 'name': 'AI bill of rights', 'year': '2022', 'framework': 'llama_index', 'title': 'Blueprint for an AI Bill of Rights'}, 'metadata for citation 3': {'page': '56', 'title': 'Blueprint for an AI Bill of Rights', 'lang': 'eng', 'section': '4', 'offset': '1', 'len': '140', 'CreationDate': '1663695035', 'Producer': 'iLovePDF', 'Title': 'Blueprint for an AI Bill of Rights', 'Creator': 'Adobe Illustrator 26.3 (Macintosh)', 'ModDate': '1664808078', 'name': 'AI bill of rights', 'year': '2022', 'framework': 'llama_index'}, 'metadata for citation 2': {'page': '56', 'title': 'Blueprint for an AI Bill of Rights', 'lang': 'eng', 'section': '4', 'offset': '9099', 'len': '689', 'CreationDate': '1663695035', 'Producer': 'iLovePDF', 'Title': 'Blueprint for an AI Bill of Rights', 'Creator': 'Adobe Illustrator 26.3 (Macintosh)', 'ModDate': '1664808078', 'name': 'AI bill of rights', 'year': '2022', 'framework': 'llama_index'}, 'metadata for citation 5': {'page': '1', 'lang': 'eng', 'section': '1', 'offset': '52952', 'len': '1025', 'CreationDate': '1663695035', 'Producer': 'iLovePDF', 'Title': 'Blueprint for an AI Bill of Rights', 'Creator': 'Adobe Illustrator 26.3 (Macintosh)', 'ModDate': '1664808078', 'name': 'AI bill of rights', 'year': '2022', 'framework': 'llama_index', 'title': 'Blueprint for an AI Bill of Rights'}}, 'factual_consistency': 0.41484985}\n", + "\u001b[0m> Running step fe08e033-f8d6-4596-af28-68c156737665. 
Step input: None\n", + "\u001b[1;3;38;5;200mThought: I can answer without using any more tools. I'll use the user's language to answer\n", + "Answer: The benefits and risks of AI are widely discussed among experts, and here is a summary based on the provided sources:\n", "\n", - "### Summary with Arguments For and Against AI\n", + "**Benefits of AI:**\n", + "- AI has the potential to transform lives by improving various aspects of daily living.\n", + "- It can lead to the development of better and more innovative infrastructure.\n", + "- When integrated thoughtfully and responsibly, AI can enhance community development, drawing lessons from urban planning.\n", "\n", - "#### Arguments For AI:\n", - "- **Innovation and Efficiency**: AI can drive significant advancements in technology and infrastructure, leading to more efficient and innovative solutions.\n", - "- **Inclusivity and Representation**: AI can ensure better representation of marginalized voices, leading to more equitable outcomes.\n", - "- **Health and Safety**: AI can enhance community health and safety by providing better tools and systems for monitoring and intervention.\n", - "- **Policy and Rights**: AI can play a crucial role in shaping policies that protect and enhance individual rights and opportunities.\n", + "**Risks of AI:**\n", + "- AI systems can result in biased data and discriminatory outcomes.\n", + "- The decision-making processes of AI can be opaque, leading to a lack of public trust and understanding.\n", + "- AI poses risks to individuals, organizations, and society, necessitating the development of risk management frameworks.\n", "\n", - "#### Arguments Against AI:\n", - "- **Bias and Discrimination**: The risk of biased data leading to discriminatory outcomes is a significant concern.\n", - "- **Transparency and Trust**: The opaque nature of AI decision-making processes can erode public trust and understanding.\n", - "- **Safety Risks**: There is a potential for AI systems to violate safety norms and impact people's rights and safety negatively.\n", - "- **Complexity of Mitigation**: Mitigating the risks associated with AI requires ongoing transparency, participatory design, and engagement with impacted communities, which can be complex and resource-intensive.\n", + "**Addressing Risks and Benefits:**\n", + "- The White House Office of Science and Technology Policy has engaged in a process to gather input from experts and stakeholders on algorithmic and data-driven harms.\n", + "- The Blueprint for an AI Bill of Rights emphasizes the need for safe and effective systems, transparency, and participatory design.\n", + "- The industry is working on innovative solutions to mitigate AI risks, including risk assessments, auditing mechanisms, and stakeholder engagement.\n", + "- The National Institute of Standards and Technology (NIST) is developing a risk management framework to manage AI-related risks effectively.\n", "\n", - "In conclusion, while AI offers numerous benefits, including innovation, improved infrastructure, and enhanced community welfare, it also poses significant risks related to bias, transparency, and safety. 
Experts argue that a balanced approach, involving robust mitigation strategies and inclusive design, is essential to harness the benefits of AI while minimizing its risks.\n", - "\u001b[0m### Comparison of Risks and Benefits of AI\n", + "Overall, while AI offers significant benefits, it also presents risks that require careful management and responsible integration into society.\n", + "\u001b[0mThe benefits and risks of AI are widely discussed among experts, and here is a summary based on the provided sources:\n", "\n", - "#### Risks of AI:\n", - "1. **Biased Data and Discriminatory Outcomes**: AI systems can perpetuate and even exacerbate biases present in the data they are trained on, leading to unfair and discriminatory outcomes.\n", - "2. **Opaque Decision-Making**: The decision-making processes of AI systems can be complex and not easily understandable, leading to a lack of transparency.\n", - "3. **Lack of Public Trust**: The opacity and potential biases in AI systems can result in a lack of trust and understanding from the public.\n", - "4. **Safety and Rights Violations**: There is a risk of AI systems violating safety norms and impacting people's rights, opportunities, or safety.\n", + "**Benefits of AI:**\n", + "- AI has the potential to transform lives by improving various aspects of daily living.\n", + "- It can lead to the development of better and more innovative infrastructure.\n", + "- When integrated thoughtfully and responsibly, AI can enhance community development, drawing lessons from urban planning.\n", "\n", - "#### Benefits of AI:\n", - "1. **Improved Infrastructure**: AI has the potential to transform and improve infrastructure, making it more innovative and efficient.\n", - "2. **Enhanced Community Health and Safety**: AI can play a significant role in improving community health, safety, and welfare by ensuring better representation and inclusivity.\n", - "3. **Prevention of Harms**: AI can help prevent harms and improve opportunities, rights, and access, particularly for marginalized communities.\n", - "4. 
**Policy Shaping**: AI is central to shaping important policies, such as the Blueprint for an AI Bill of Rights, which aims to protect and enhance the rights of individuals.\n", + "**Risks of AI:**\n", + "- AI systems can result in biased data and discriminatory outcomes.\n", + "- The decision-making processes of AI can be opaque, leading to a lack of public trust and understanding.\n", + "- AI poses risks to individuals, organizations, and society, necessitating the development of risk management frameworks.\n", "\n", - "### Summary with Arguments For and Against AI\n", + "**Addressing Risks and Benefits:**\n", + "- The White House Office of Science and Technology Policy has engaged in a process to gather input from experts and stakeholders on algorithmic and data-driven harms.\n", + "- The Blueprint for an AI Bill of Rights emphasizes the need for safe and effective systems, transparency, and participatory design.\n", + "- The industry is working on innovative solutions to mitigate AI risks, including risk assessments, auditing mechanisms, and stakeholder engagement.\n", + "- The National Institute of Standards and Technology (NIST) is developing a risk management framework to manage AI-related risks effectively.\n", "\n", - "#### Arguments For AI:\n", - "- **Innovation and Efficiency**: AI can drive significant advancements in technology and infrastructure, leading to more efficient and innovative solutions.\n", - "- **Inclusivity and Representation**: AI can ensure better representation of marginalized voices, leading to more equitable outcomes.\n", - "- **Health and Safety**: AI can enhance community health and safety by providing better tools and systems for monitoring and intervention.\n", - "- **Policy and Rights**: AI can play a crucial role in shaping policies that protect and enhance individual rights and opportunities.\n", - "\n", - "#### Arguments Against AI:\n", - "- **Bias and Discrimination**: The risk of biased data leading to discriminatory outcomes is a significant concern.\n", - "- **Transparency and Trust**: The opaque nature of AI decision-making processes can erode public trust and understanding.\n", - "- **Safety Risks**: There is a potential for AI systems to violate safety norms and impact people's rights and safety negatively.\n", - "- **Complexity of Mitigation**: Mitigating the risks associated with AI requires ongoing transparency, participatory design, and engagement with impacted communities, which can be complex and resource-intensive.\n", - "\n", - "In conclusion, while AI offers numerous benefits, including innovation, improved infrastructure, and enhanced community welfare, it also poses significant risks related to bias, transparency, and safety. Experts argue that a balanced approach, involving robust mitigation strategies and inclusive design, is essential to harness the benefits of AI while minimizing its risks.\n" + "Overall, while AI offers significant benefits, it also presents risks that require careful management and responsible integration into society.\n" ] } ], "source": [ - "question = \"\"\"\n", - " What are the risks of AI? 
What are the benefits?\n", - " Compare and contrast and provide a summary with arguments for and against from experts.\n", - "\"\"\"\n", + "from vectara_agentic.agent import Agent\n", + "import os\n", + "from dotenv import load_dotenv\n", + "\n", + "load_dotenv(override=True)\n", + "\n", + "api_key = str(os.environ[\"VECTARA_API_KEY\"])\n", + "corpus_id = str(os.environ[\"VECTARA_CORPUS_ID\"])\n", + "customer_id = str(os.environ[\"VECTARA_CUSTOMER_ID\"])\n", "\n", - "print(agent.chat(question).response)" + "agent = Agent.from_corpus(\n", + " tool_name=\"query_ai\",\n", + " vectara_customer_id=customer_id,\n", + " vectara_corpus_id=corpus_id,\n", + " vectara_api_key=api_key,\n", + " data_description=\"AI regulations\",\n", + " assistant_specialty=\"artificial intelligence\",\n", + " verbose=True,\n", + " vectara_summary_num_results=5,\n", + " vectara_summarizer=\"mockingbird-1.0-2024-07-16\",\n", + " vectara_reranker=\"mmr\",\n", + " vectara_rerank_k=50,\n", + ")\n", + "\n", + "print(\n", + " agent.chat(\n", + " \"What are the risks of AI? What are the benefits? Compare and contrast and provide a summary with arguments for and against from experts.\"\n", + " )\n", + ")" ] } ], diff --git a/docs/docs/examples/managed/zcpDemo.ipynb b/docs/docs/examples/managed/zcpDemo.ipynb index 10e194c7b49e6..7e13d46d764f7 100644 --- a/docs/docs/examples/managed/zcpDemo.ipynb +++ b/docs/docs/examples/managed/zcpDemo.ipynb @@ -5,7 +5,7 @@ "id": "adf7d63d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/metadata_extraction/EntityExtractionClimate.ipynb b/docs/docs/examples/metadata_extraction/EntityExtractionClimate.ipynb index 5c638323ba90d..6b3cced776420 100644 --- a/docs/docs/examples/metadata_extraction/EntityExtractionClimate.ipynb +++ b/docs/docs/examples/metadata_extraction/EntityExtractionClimate.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/metadata_extraction/MetadataExtractionSEC.ipynb b/docs/docs/examples/metadata_extraction/MetadataExtractionSEC.ipynb index 6c5ca04b1ad1a..337ace51daeec 100644 --- a/docs/docs/examples/metadata_extraction/MetadataExtractionSEC.ipynb +++ b/docs/docs/examples/metadata_extraction/MetadataExtractionSEC.ipynb @@ -5,7 +5,7 @@ "id": "76534cce", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/metadata_extraction/MetadataExtraction_LLMSurvey.ipynb b/docs/docs/examples/metadata_extraction/MetadataExtraction_LLMSurvey.ipynb index cfeec5999290e..b851c14b06c08 100644 --- a/docs/docs/examples/metadata_extraction/MetadataExtraction_LLMSurvey.ipynb +++ b/docs/docs/examples/metadata_extraction/MetadataExtraction_LLMSurvey.ipynb @@ -5,7 +5,7 @@ "id": "41aecfc5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/multi_modal/mm_agent.ipynb b/docs/docs/examples/multi_modal/mm_agent.ipynb index 1dd5a00d36baa..97f6b222644b6 100644 --- a/docs/docs/examples/multi_modal/mm_agent.ipynb +++ b/docs/docs/examples/multi_modal/mm_agent.ipynb @@ -7,7 +7,7 @@ "source": [ "# [Beta] Multi-modal ReAct Agent\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "In this tutorial we show you how to construct a multi-modal ReAct agent.\n", "\n", diff --git a/docs/docs/examples/node_postprocessor/MixedbreadAIRerank.ipynb b/docs/docs/examples/node_postprocessor/MixedbreadAIRerank.ipynb index 60cadf8f8ae20..fac05e6406c75 100644 --- a/docs/docs/examples/node_postprocessor/MixedbreadAIRerank.ipynb +++ 
b/docs/docs/examples/node_postprocessor/MixedbreadAIRerank.ipynb @@ -5,7 +5,7 @@ "id": "4b5daafbac08a79e", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/node_postprocessor/NVIDIARerank.ipynb b/docs/docs/examples/node_postprocessor/NVIDIARerank.ipynb index a6733818b797e..eee4783a5a459 100644 --- a/docs/docs/examples/node_postprocessor/NVIDIARerank.ipynb +++ b/docs/docs/examples/node_postprocessor/NVIDIARerank.ipynb @@ -63,7 +63,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet llama-index-postprocessor-nvidia-rerank" + "%pip install --upgrade --quiet llama-index-postprocessor-nvidia-rerank llama-index-llms-nvidia llama-index-readers-file" ] }, { @@ -114,13 +114,120 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n" + ] + } + ], "source": [ "from llama_index.postprocessor.nvidia_rerank import NVIDIARerank\n", + "from llama_index.core import SimpleDirectoryReader, Settings, VectorStoreIndex\n", + "from llama_index.embeddings.nvidia import NVIDIAEmbedding\n", + "from llama_index.llms.nvidia import NVIDIA\n", + "from llama_index.core.node_parser import SentenceSplitter\n", + "from llama_index.core import Settings\n", + "import os\n", "\n", "reranker = NVIDIARerank(top_n=4)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mkdir: cannot create directory ‘data’: File exists\n", + "--2024-07-03 10:33:17-- https://www.dropbox.com/scl/fi/p33j9112y0ysgwg77fdjz/2021_Housing_Inventory.pdf?rlkey=yyok6bb18s5o31snjd2dxkxz3&dl=0\n", + "Resolving www.dropbox.com (www.dropbox.com)... 162.125.81.18, 2620:100:6031:18::a27d:5112\n", + "Connecting to www.dropbox.com (www.dropbox.com)|162.125.81.18|:443... connected.\n", + "HTTP request sent, awaiting response... 302 Found\n", + "Location: https://uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com/cd/0/inline/CV9Hy3nIrjnOf-Fqsgd-YhHcMaj0AHvOQaE1b4sdiKnOBqZL_u9ml6dAGctGxr5I79yD_kI8BNwDtFl_ll_sdfdt0iXcIYosfxaPr2NdbkRAMR6vg9UXuCU8kNEFi0D3Grs/file# [following]\n", + "--2024-07-03 10:33:18-- https://uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com/cd/0/inline/CV9Hy3nIrjnOf-Fqsgd-YhHcMaj0AHvOQaE1b4sdiKnOBqZL_u9ml6dAGctGxr5I79yD_kI8BNwDtFl_ll_sdfdt0iXcIYosfxaPr2NdbkRAMR6vg9UXuCU8kNEFi0D3Grs/file\n", + "Resolving uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com (uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com)... 162.125.81.15, 2620:100:6031:15::a27d:510f\n", + "Connecting to uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com (uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com)|162.125.81.15|:443... connected.\n", + "HTTP request sent, awaiting response... 
302 Found\n", + "Location: /cd/0/inline2/CV9Ugj_mK7TSMb3sw_BdQFrj2rzx-SI2cfGU7-VF4bcW3PdhxO4qw--AXQKUidWtDL_54rViwvbaBGHMvtMEAK_lCIwXXj5XwkKpJKTmP0mDrz8eU2qu0FGyi4uOGfO7TeNLFMFY_bBGUMHMatvKJVPF59Ps94-8LC40ba-Cgv2YKZtcU-UjFpLh-Fnf6emkG-c8eUWB2uKPX_Lx0E4hCENQEPOGOfMhDHU0DC8k6khZiilmLtjXsDJ0H4y3efQ-Fz-VsWCC2FcoGpDcxXGu1Ysp5-mP2eHpH3qOx20d2IrndwN4RGLAqzR6cfsOHPMvoYPyLjOW1322t1O46mXqcjv94OPEEIIHI-2K8xL4pBjLUQ/file [following]\n", + "--2024-07-03 10:33:18-- https://uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com/cd/0/inline2/CV9Ugj_mK7TSMb3sw_BdQFrj2rzx-SI2cfGU7-VF4bcW3PdhxO4qw--AXQKUidWtDL_54rViwvbaBGHMvtMEAK_lCIwXXj5XwkKpJKTmP0mDrz8eU2qu0FGyi4uOGfO7TeNLFMFY_bBGUMHMatvKJVPF59Ps94-8LC40ba-Cgv2YKZtcU-UjFpLh-Fnf6emkG-c8eUWB2uKPX_Lx0E4hCENQEPOGOfMhDHU0DC8k6khZiilmLtjXsDJ0H4y3efQ-Fz-VsWCC2FcoGpDcxXGu1Ysp5-mP2eHpH3qOx20d2IrndwN4RGLAqzR6cfsOHPMvoYPyLjOW1322t1O46mXqcjv94OPEEIIHI-2K8xL4pBjLUQ/file\n", + "Reusing existing connection to uc471d2c8af935aa4ab2f86937a6.dl.dropboxusercontent.com:443.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 4808625 (4.6M) [application/pdf]\n", + "Saving to: ‘data/housing_data.pdf’\n", + "\n", + "data/housing_data.p 100%[===================>] 4.58M 2.68MB/s in 1.7s \n", + "\n", + "2024-07-03 10:33:21 (2.68 MB/s) - ‘data/housing_data.pdf’ saved [4808625/4808625]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir data\n", + "!wget \"https://www.dropbox.com/scl/fi/p33j9112y0ysgwg77fdjz/2021_Housing_Inventory.pdf?rlkey=yyok6bb18s5o31snjd2dxkxz3&dl=0\" -O \"data/housing_data.pdf\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Settings.text_splitter = SentenceSplitter(chunk_size=500)\n", + "\n", + "documents = SimpleDirectoryReader(\"./data\").load_data()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Settings.embed_model = NVIDIAEmbedding(model=\"NV-Embed-QA\", truncate=\"END\")\n", + "\n", + "index = VectorStoreIndex.from_documents(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Settings.llm = NVIDIA()\n", + "\n", + "query_engine = index.as_query_engine(\n", + " similarity_top_k=20, node_postprocessors=[reranker]\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The net gain in housing units in the Mission in 2021 was not specified in the provided context information.\n" + ] + } + ], + "source": [ + "response = query_engine.query(\n", + " \"What was the net gain in housing units in the Mission in 2021?\"\n", + ")\n", + "print(response)" + ] + }, { "cell_type": "markdown", "metadata": {}, diff --git a/docs/docs/examples/node_postprocessor/openvino_rerank.ipynb b/docs/docs/examples/node_postprocessor/openvino_rerank.ipynb index 62afb44bdb7ab..12eac60e32b7a 100644 --- a/docs/docs/examples/node_postprocessor/openvino_rerank.ipynb +++ b/docs/docs/examples/node_postprocessor/openvino_rerank.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/node_postprocessor/rankLLM.ipynb b/docs/docs/examples/node_postprocessor/rankLLM.ipynb index 6da31bd383195..eaac6373ab9da 100644 --- a/docs/docs/examples/node_postprocessor/rankLLM.ipynb +++ b/docs/docs/examples/node_postprocessor/rankLLM.ipynb @@ -6,23 +6,23 @@ "source": [ "# RankLLM Reranker 
Demonstration (Van Gogh Wiki)\n", "\n", - "This demo showcases how to use RankLLM (https://github.com/castorini/rank_llm) to rerank passages. RankLLM offers a suite of listwise rerankers, albeit with focus on open source LLMs finetuned for the task - RankVicuna and RankZephyr being two of them.\n", + "This demo showcases how to use [RankLLM](https://github.com/castorini/rank_llm) to rerank passages.\n", "\n", - "It compares query search results from Van Gogh’s wikipedia with just retrieval (using VectorIndexRetriever from llama-index) and retrieval+reranking with RankLLM. It demonstrates two models from RankLLM:\n", + "RankLLM offers a suite of listwise rerankers, albeit with a focus on open-source LLMs fine-tuned for the task - RankVicuna and RankZephyr being two of them.\n", "\n", - "- ```RankVicuna 7B V1```\n", - "- ```RankZephyr 7B V1 - Full - BF16```\n", + "It compares query search results from Van Gogh’s wikipedia with just retrieval (using VectorIndexRetriever from llama-index) and retrieval+reranking with RankLLM. We show an example of reranking 50 candidates using the RankZephyr reranker, which uses a listwise sliding window algorithm.\n", "\n", + "\n", + "_______________________________\n", "Dependencies:\n", "\n", - "- Currently, RankLLM's rerankers require requires `CUDA`\n", + "- **CUDA**\n", "- The built-in retriever, which uses [Pyserini](https://github.com/castorini/pyserini), requires `JDK11`, `PyTorch`, and `Faiss`\n", "\n", "\n", "### castorini/rank_llm\n", - "Repository for prompt-decoding using LLMs (```GPT3.5```, ```GPT4```, ```Vicuna```, and ```Zephyr```)\\\n", - "Website: [http://rankllm.ai](http://rankllm.ai)\\\n", - "Stars: 193" + "Suite of LLM-based reranking models (e.g., `RankZephyr`, `LiT5`, `RankVicuna`)\\\n", + "Website: [http://rankllm.ai](http://rankllm.ai)" ] }, { @@ -165,12 +165,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Retrieval + RankLLM Reranking\n", + "### Retrieval + RankLLM Reranking (sliding window)\n", "\n", "1. Set up retriever and reranker\n", "2. Retrieve results given search query without reranking\n", - "3. Retrieve results given search query with RankZephyr reranking\n", - "4. Retrieve results given search query with RankVicuna reranking" + "3. 
Retrieve results given search query with RankZephyr reranking" ] }, { @@ -181,9 +180,10 @@ "source": [ "from llama_index.core.retrievers import VectorIndexRetriever\n", "from llama_index.core import QueryBundle\n", - "from llama_index.postprocessor.rankLLM_rerank import RankLLMRerank\n", + "from llama_index.postprocessor.rankllm_rerank import RankLLMRerank\n", "\n", "import pandas as pd\n", + "import torch\n", "from IPython.display import display, HTML\n", "\n", "\n", @@ -192,9 +192,8 @@ " vector_top_k=10,\n", " reranker_top_n=3,\n", " with_reranker=False,\n", - " with_retrieval=False,\n", - " model=\"zephyr\",\n", - " gpt_model=\"gpt-3.5-turbo\",\n", + " model=\"rank_zephyr\",\n", + " window_size=None,\n", "):\n", " query_bundle = QueryBundle(query_str)\n", " # configure retriever\n", @@ -203,19 +202,21 @@ " similarity_top_k=vector_top_k,\n", " )\n", " retrieved_nodes = retriever.retrieve(query_bundle)\n", + " retrieved_nodes.reverse()\n", "\n", " if with_reranker:\n", " # configure reranker\n", " reranker = RankLLMRerank(\n", - " top_n=reranker_top_n,\n", - " model=model,\n", - " with_retrieval=with_retrieval,\n", - " gpt_model=gpt_model,\n", + " model=model, top_n=reranker_top_n, window_size=window_size\n", " )\n", " retrieved_nodes = reranker.postprocess_nodes(\n", " retrieved_nodes, query_bundle\n", " )\n", "\n", + " # clear cache, rank_zephyr uses 16GB of GPU VRAM\n", + " del reranker\n", + " torch.cuda.empty_cache()\n", + "\n", " return retrieved_nodes\n", "\n", "\n", @@ -236,9 +237,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Retrieval top 3 results without reranking\n", + "### Without `RankZephyr` reranking, the correct result is ranked `47`th/50.\n", "\n", - "## Expected result:\n", + "#### Expected result:\n", "```After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers;```" ] }, @@ -269,18 +270,18 @@ " \n", " \n", " 0\n", - " 0.851540\n", - " Gauguin fled Arles, never to see Van Gogh again. They continued to correspond, and in 1890, Gauguin proposed they form a studio in Antwerp. Meanwhile, other visitors to the hospital included Marie Ginoux and Roulin.Despite a pessimistic diagnosis, Van Gogh recovered and returned to the Yellow House on 7 January 1889. He spent the following month between hospital and home, suffering from hallucinations and delusions of poisoning. In March, the police closed his house after a petition by 30 townspeople (including the Ginoux family) who described him as le fou roux \"the redheaded madman\"; Van Gogh returned to hospital. Paul Signac visited him twice in March; in April, Van Gogh moved into rooms owned by Dr Rey after floods damaged paintings in his own home. Two months later, he left Arles and voluntarily entered an asylum in Saint-Rémy-de-Provence. Around this time, he wrote, \"Sometimes moods of indescribable anguish, sometimes moments when the veil of time and fatality of circumstances seemed to be torn apart for an instant.\"Van Gogh gave his 1889 Portrait of Doctor Félix Rey to Dr Rey. The physician was not fond of the painting and used it to repair a chicken coop, then gave it away. In 2016, the portrait was housed at the Pushkin Museum of Fine Arts and estimated to be worth over $50 million.



==== Saint-Rémy (May 1889 – May 1890) ====

Van Gogh entered the Saint-Paul-de-Mausole asylum on 8 May 1889, accompanied by his caregiver, Frédéric Salles, a Protestant clergyman. Saint-Paul was a former monastery in Saint-Rémy, located less than 30 kilometres (19 mi) from Arles, and it was run by a former naval doctor, Théophile Peyron. Van Gogh had two cells with barred windows, one of which he used as a studio. The clinic and its garden became the main subjects of his paintings.\n", + " 0.766322\n", + " Yellow meant the most to him, because it symbolised emotional truth. He used yellow as a symbol for sunlight, life, and God.
Van Gogh strove to be a painter of rural life and nature; during his first summer in Arles he used his new palette to paint landscapes and traditional rural life. His belief that a power existed behind the natural led him to try to capture a sense of that power, or the essence of nature in his art, sometimes through the use of symbols. His renditions of the sower, at first copied from Jean-François Millet, reflect the influence of Thomas Carlyle and Friedrich Nietzsche's thoughts on the heroism of physical labour, as well as van Gogh's religious beliefs: the sower as Christ sowing life beneath the hot sun. These were themes and motifs he returned to often to rework and develop. His paintings of flowers are filled with symbolism, but rather than use traditional Christian iconography he made up his own, where life is lived under the sun and work is an allegory of life. In Arles, having gained confidence after painting spring blossoms and learning to capture bright sunlight, he was ready to paint The Sower.

Van Gogh stayed within what he called the \"guise of reality\" and was critical of overly stylised works. He wrote afterwards that the abstraction of Starry Night had gone too far and that reality had \"receded too far in the background\". Hughes describes it as a moment of extreme visionary ecstasy: the stars are in a great whirl, reminiscent of Hokusai's Great Wave, the movement in the heaven above is reflected by the movement of the cypress on the earth below, and the painter's vision is \"translated into a thick, emphatic plasma of paint\".
Between 1885 and his death in 1890, van Gogh appears to have been building an oeuvre, a collection that reflected his personal vision and could be commercially successful. He was influenced by Blanc's definition of style, that a true painting required optimal use of colour, perspective and brushstrokes. Van Gogh applied the word \"purposeful\" to paintings he thought he had mastered, as opposed to those he thought of as studies.\n", " \n", " \n", " 1\n", - " 0.849399\n", - " On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair.After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps. The single painting Gauguin completed during his visit was his portrait of Van Gogh.Van Gogh and Gauguin visited Montpellier in December 1888, where they saw works by Courbet and Delacroix in the Musée Fabre. Their relationship began to deteriorate; Van Gogh admired Gauguin and wanted to be treated as his equal, but Gauguin was arrogant and domineering, which frustrated Van Gogh. They often quarrelled; Van Gogh increasingly feared that Gauguin was going to desert him, and the situation, which Van Gogh described as one of \"excessive tension\", rapidly headed towards crisis point.



==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that Van Gogh followed him after he left for a walk and \"rushed towards me, an open razor in his hand.\"\n", + " 0.768032\n", + " ==== Self-portraits ====

Van Gogh created more than 43 self-portraits between 1885 and 1889. They were usually completed in series, such as those painted in Paris in mid-1887, and continued until shortly before his death. Generally the portraits were studies, created during periods when he was reluctant to mix with others or when he lacked models and painted himself.
Van Gogh's self-portraits reflect a high degree of self-scrutiny. Often they were intended to mark important periods in his life; for example, the mid-1887 Paris series were painted at the point where he became aware of Claude Monet, Paul Cézanne and Signac. In Self-Portrait with Grey Felt Hat, heavy strains of paint spread outwards across the canvas. It is one of his most renowned self-portraits of that period, \"with its highly organized rhythmic brushstrokes, and the novel halo derived from the Neo-impressionist repertoire was what van Gogh himself called a 'purposeful' canvas\".
They contain a wide array of physiognomical representations. Van Gogh's mental and physical condition is usually apparent; he may appear unkempt, unshaven or with a neglected beard, with deeply sunken eyes, a weak jaw, or having lost teeth. Some show him with full lips, a long face or prominent skull, or sharpened, alert features. His hair is sometimes depicted in a vibrant reddish hue and at other times ash colored.
Van Gogh's self-portraits vary stylistically. In those painted after December 1888, the strong contrast of vivid colors highlight the haggard pallor of his skin. Some depict the artist with a beard, others without. He can be seen with bandages in portraits executed just after he mutilated his ear. In only a few does he depict himself as a painter. Those painted in Saint-Rémy show the head from the right, the side opposite his damaged ear, as he painted himself reflected in his mirror.\n", " \n", " \n", " 2\n", - " 0.848984\n", - " When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.



==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. \"In the hope of living in a studio of our own with Gauguin,\" he wrote in a letter to Theo, \"I'd like to do a decoration for the studio. Nothing but large Sunflowers.\"When Boch visited again, Van Gogh painted a portrait of him, as well as the study The Poet Against a Starry Sky.In preparation for Gauguin's visit, Van Gogh bought two beds on advice from the station's postal supervisor Joseph Roulin, whose portrait he painted. On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair.After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps.\n", + " 0.769051\n", + " With their broad brushstrokes, inventive perspectives, colours, contours and designs, these paintings represent the style he sought.


=== Major series ===

Van Gogh's stylistic developments are usually linked to the periods he spent living in different places across Europe. He was inclined to immerse himself in local cultures and lighting conditions, although he maintained a highly individual visual outlook throughout. His evolution as an artist was slow and he was aware of his painterly limitations. Van Gogh moved home often, perhaps to expose himself to new visual stimuli, and through exposure develop his technical skill. Art historian Melissa McQuillan believes the moves also reflect later stylistic changes and that van Gogh used the moves to avoid conflict, and as a coping mechanism for when the idealistic artist was faced with the realities of his then current situation.


==== Portraits ====

Van Gogh said portraiture was his greatest interest. \"What I'm most passionate about, much much more than all the rest in my profession\", he wrote in 1890, \"is the portrait, the modern portrait.\" It is \"the only thing in painting that moves me deeply and that gives me a sense of the infinite.\" He wrote to his sister that he wished to paint portraits that would endure, and that he would use colour to capture their emotions and character rather than aiming for photographic realism. Those closest to van Gogh are mostly absent from his portraits; he rarely painted Theo, van Rappard or Bernard. The portraits of his mother were from photographs.
Van Gogh painted Arles' postmaster Joseph Roulin and his family repeatedly. In five versions of La Berceuse (The Lullaby), van Gogh painted Augustine Roulin quietly holding a rope that rocks the unseen cradle of her infant daughter. Van Gogh had planned for it to be the central image of a triptych, flanked by paintings of sunflowers.\n", " \n", " \n", "" @@ -296,159 +297,18 @@ "source": [ "new_nodes = get_retrieved_nodes(\n", " \"Which date did Paul Gauguin arrive in Arles?\",\n", - " vector_top_k=3,\n", + " vector_top_k=50,\n", " with_reranker=False,\n", - " model=\"zephyr\",\n", - ")\n", - "\n", - "visualize_retrieved_nodes(new_nodes)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### The correct result is ranked 3rd." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Retrieve and Rerank top 10 results using RankZephyr and return top 3" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", - "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "1ad13a552896432ba5b8fdcef814aab0", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Loading checkpoint shards: 0%| | 0/3 [00:00\n", - " \n", - " \n", - " \n", - " Score\n", - " Text\n", - " \n", - " \n", - " \n", - " \n", - " 0\n", - " 0.848984\n", - " When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.

\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. \"In the hope of living in a studio of our own with Gauguin,\" he wrote in a letter to Theo, \"I'd like to do a decoration for the studio. Nothing but large Sunflowers.\" When Boch visited again, Van Gogh painted a portrait of him, as well as the study The Poet Against a Starry Sky. In preparation for Gauguin's visit, Van Gogh bought two beds on advice from the station's postal supervisor Joseph Roulin, whose portrait he painted. On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair. After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps.\n",

\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that Van Gogh followed him after he left for a walk and \"rushed towards me, an open razor in his hand.\"\n", - " \n", - " \n", - " 2\n", - " 0.819207\n", - " The ear was brought to the hospital, but Rey did not attempt to reattach it as too much time had passed. Van Gogh researcher and art historian Bernadette Murphy discovered the true identity of the woman named Gabrielle, who died in Arles at the age of 80 in 1952, and whose descendants still lived (as of 2020) just outside Arles. Gabrielle, known in her youth as \"Gaby,\" was a 17-year-old cleaning girl at the brothel and other local establishments at the time Van Gogh presented her with his ear.Van Gogh had no recollection of the event, suggesting that he may have suffered an acute mental breakdown. The hospital diagnosis was \"acute mania with generalised delirium\", and within a few days, the local police ordered that he be placed in hospital care. Gauguin immediately notified Theo, who, on 24 December, had proposed marriage to his old friend Andries Bonger's sister Johanna. That evening, Theo rushed to the station to board a night train to Arles. He arrived on Christmas Day and comforted Vincent, who seemed to be semi-lucid. That evening, he left Arles for the return trip to Paris.During the first days of his treatment, Van Gogh repeatedly and unsuccessfully asked for Gauguin, who asked a policeman attending the case to \"be kind enough, Monsieur, to awaken this man with great care, and if he asks for me tell him I have left for Paris; the sight of me might prove fatal for him.\" Gauguin fled Arles, never to see Van Gogh again. They continued to correspond, and in 1890, Gauguin proposed they form a studio in Antwerp. Meanwhile, other visitors to the hospital included Marie Ginoux and Roulin.Despite a pessimistic diagnosis, Van Gogh recovered and returned to the Yellow House on 7 January 1889. He spent the following month between hospital and home, suffering from hallucinations and delusions of poisoning. In March, the police closed his house after a petition by 30 townspeople (including the Ginoux family) who described him as le fou roux \"the redheaded madman\"; Van Gogh returned to hospital.\n", - " \n", - " \n", - "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "new_nodes = get_retrieved_nodes(\n", - " \"Which date did Paul Gauguin arrive in Arles?\",\n", - " vector_top_k=10,\n", - " reranker_top_n=3,\n", - " with_reranker=True,\n", - " with_retrieval=False,\n", - " model=\"zephyr\",\n", - ")\n", - "\n", - "visualize_retrieved_nodes(new_nodes)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### The correct result is ranked 1st after RankZephyr rerank." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Retrieve and Rerank top 10 results using RankVicuna and return top 3." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "new_nodes = get_retrieved_nodes(\n", - " \"Which date did Paul Gauguin arrive in Arles?\",\n", - " vector_top_k=10,\n", - " reranker_top_n=3,\n", - " with_reranker=True,\n", - " with_retrieval=False,\n", - " model=\"vicuna\",\n", ")\n", "\n", - "visualize_retrieved_nodes(new_nodes)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### The correct result is ranked 1st after RankVicuna rerank." + "visualize_retrieved_nodes(new_nodes[:3])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Retrieve and Rerank top 10 results using RankGPT and return top 3\n", - "\n", - "RankGPT is built into RankLLM and can be used as shown below. " + "### With `RankZephyr` reranking, the correct result is ranked `1`st/50" ] }, { @@ -461,29 +321,29 @@ "output_type": "stream", "text": [ "INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", - "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" + "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", + "Loading rank_zephyr ...\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - " 0%| | 0/1 [00:00\n", " \n", " 0\n", - " 0.849399\n", - " On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair.After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps. The single painting Gauguin completed during his visit was his portrait of Van Gogh.Van Gogh and Gauguin visited Montpellier in December 1888, where they saw works by Courbet and Delacroix in the Musée Fabre. Their relationship began to deteriorate; Van Gogh admired Gauguin and wanted to be treated as his equal, but Gauguin was arrogant and domineering, which frustrated Van Gogh. They often quarrelled; Van Gogh increasingly feared that Gauguin was going to desert him, and the situation, which Van Gogh described as one of \"excessive tension\", rapidly headed towards crisis point.
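For clarity, the `RankZephyr` run summarized above boils down to a single node postprocessor applied on top of the same retrieval. A sketch, assuming the updated `RankLLMRerank` accepts `model`, `top_n`, and `window_size` as the updated cells in this diff pass them through `get_retrieved_nodes`:

```python
from llama_index.postprocessor.rankLLM_rerank import RankLLMRerank

# Rerank the 50 retrieved candidates with RankZephyr and keep the top 3.
# `retrieved_nodes` and `query_bundle` come from the retrieval step above.
reranker = RankLLMRerank(model="rank_zephyr", top_n=3, window_size=15)
reranked_nodes = reranker.postprocess_nodes(retrieved_nodes, query_bundle)
```

Because RankZephyr is a listwise reranker, a `window_size` of 15 means each LLM call orders a batch of 15 candidates jointly rather than scoring chunks independently.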

\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that Van Gogh followed him after he left for a walk and \"rushed towards me, an open razor in his hand.\"\n", + " 0.857234\n", + " After much pleading from van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted van Gogh in his The Painter of Sunflowers; van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps. The single painting Gauguin completed during his visit was his portrait of van Gogh.
Van Gogh and Gauguin visited Montpellier in December 1888, where they saw works by Courbet and Delacroix in the Musée Fabre. Their relationship began to deteriorate; van Gogh admired Gauguin and wanted to be treated as his equal, but Gauguin was arrogant and domineering, which frustrated van Gogh. They often quarreled; van Gogh increasingly feared that Gauguin was going to desert him, and the situation, which van Gogh described as one of \"excessive tension\", rapidly headed towards crisis point.

\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that van Gogh followed him after he left for a walk and \"rushed towards me, an open razor in his hand.\" This account is uncorroborated; Gauguin was almost certainly absent from the Yellow House that night, most likely staying in a hotel.
After an altercation on the evening of 23 December 1888, van Gogh returned to his room where he seemingly heard voices and either wholly or in part severed his left ear with a razor causing severe bleeding.\n", " \n", " \n", " 1\n", - " 0.848972\n", - " When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.

\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. \"In the hope of living in a studio of our own with Gauguin,\" he wrote in a letter to Theo, \"I'd like to do a decoration for the studio. Nothing but large Sunflowers.\"When Boch visited again, Van Gogh painted a portrait of him, as well as the study The Poet Against a Starry Sky.In preparation for Gauguin's visit, Van Gogh bought two beds on advice from the station's postal supervisor Joseph Roulin, whose portrait he painted. On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair.After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps.\n", + " 0.861649\n", + " ==== Gauguin's visit (1888) ====


When Gauguin agreed to visit Arles in 1888, van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. \"In the hope of living in a studio of our own with Gauguin,\" he wrote in a letter to Theo, \"I'd like to do a decoration for the studio. Nothing but large Sunflowers.\"
When Boch visited again, van Gogh painted a portrait of him, as well as the study The Poet Against a Starry Sky.
In preparation for Gauguin's visit, van Gogh bought two beds on advice from the station's postal supervisor Joseph Roulin, whose portrait he painted. On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair.
After much pleading from van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted van Gogh in his The Painter of Sunflowers; van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps. The single painting Gauguin completed during his visit was his portrait of van Gogh.
Van Gogh and Gauguin visited Montpellier in December 1888, where they saw works by Courbet and Delacroix in the Musée Fabre. Their relationship began to deteriorate; van Gogh admired Gauguin and wanted to be treated as his equal, but Gauguin was arrogant and domineering, which frustrated van Gogh.\n", " \n", " \n", " 2\n", - " 0.828397\n", - " In April, he was visited by the American artist Dodge MacKnight, who was living nearby at Fontvieille.On 1 May 1888, Van Gogh signed a lease for four rooms in the Yellow House. The house at 2 place Lamartine cost 15 francs per month. The rooms were unfurnished and had been uninhabited for months. Because the Yellow House had to be furnished before he could fully move in, Van Gogh moved from the Hôtel Carrel to the Café de la Gare on 7 May 1888. He had befriended the Yellow House's proprietors, Joseph and Marie Ginoux, and was able to use it as a studio. Van Gogh wanted a gallery to display his work and started a series of paintings that eventually included Van Gogh's Chair (1888), Bedroom in Arles (1888), The Night Café (1888), Café Terrace at Night (September 1888), Starry Night Over the Rhone (1888), and Still Life: Vase with Twelve Sunflowers (1888), all intended for the decoration for the Yellow House.Van Gogh wrote that with The Night Café he tried \"to express the idea that the café is a place where one can ruin oneself, go mad, or commit a crime\". When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.

\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. \"In the hope of living in a studio of our own with Gauguin,\" he wrote in a letter to Theo, \"I'd like to do a decoration for the studio. Nothing but large Sunflowers.\n", + " 0.852035\n", + " Gauguin fled Arles, never to see van Gogh again. They continued to correspond, and in 1890, Gauguin proposed they form a studio in Antwerp. Meanwhile, other visitors to the hospital included Marie Ginoux and Roulin.
Despite a pessimistic diagnosis, van Gogh recovered and returned to the Yellow House on 7 January 1889. He spent the following month between hospital and home, suffering from hallucinations and delusions of poisoning. In March, the police closed his house after a petition by 30 townspeople (including the Ginoux family) who described him as le fou roux \"the redheaded madman\"; Van Gogh returned to hospital. Paul Signac visited him twice in March; in April, van Gogh moved into rooms owned by Dr Rey after floods damaged paintings in his own home. Two months later, he left Arles and voluntarily entered an asylum in Saint-Rémy-de-Provence. Around this time, he wrote, \"Sometimes moods of indescribable anguish, sometimes moments when the veil of time and fatality of circumstances seemed to be torn apart for an instant.\"
Van Gogh gave his 1889 Portrait of Doctor Félix Rey to Dr Rey. The physician was not fond of the painting and used it to repair a chicken coop, then gave it away. In 2016, the portrait was housed at the Pushkin Museum of Fine Arts and estimated to be worth over $50 million.

\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t
\\t\\t
\\t\\t\\t
\\t\\t\\t
\\t\\t


==== Saint-Rémy (May 1889 – May 1890) ====

Van Gogh entered the Saint-Paul-de-Mausole asylum on 8 May 1889, accompanied by his caregiver, Frédéric Salles, a Protestant clergyman. Saint-Paul was a former monastery in Saint-Rémy, located less than 30 kilometres (19 mi) from Arles, and it was run by a former naval doctor, Théophile Peyron. Van Gogh had two cells with barred windows, one of which he used as a studio. The clinic and its garden became the main subjects of his paintings.\n", " \n", " \n", "" @@ -527,12 +387,11 @@ "source": [ "new_nodes = get_retrieved_nodes(\n", " \"Which date did Paul Gauguin arrive in Arles?\",\n", - " vector_top_k=10,\n", + " vector_top_k=50,\n", " reranker_top_n=3,\n", " with_reranker=True,\n", - " with_retrieval=False,\n", - " model=\"gpt\",\n", - " gpt_model=\"gpt-3.5-turbo\",\n", + " model=\"rank_zephyr\",\n", + " window_size=15,\n", ")\n", "\n", "visualize_retrieved_nodes(new_nodes)" @@ -542,14 +401,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### The correct result is ranked 1st after RankGPT rerank." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Sliding window example with RankZephyr." + "## Retrieve and Rerank top 10 results using RankVicuna, RankGPT" ] }, { @@ -558,445 +410,22 @@ "metadata": {}, "outputs": [], "source": [ - "from llama_index.core.retrievers import VectorIndexRetriever\n", - "from llama_index.core import QueryBundle\n", - "from llama_index.postprocessor.rankLLM_rerank import RankLLMRerank\n", - "\n", - "import pandas as pd\n", - "from IPython.display import display, HTML\n", - "\n", - "\n", - "def get_retrieved_nodes_mixed(\n", - " query_str,\n", + "# RankVicuna\n", + "new_nodes = get_retrieved_nodes(\n", + " \"Which date did Paul Gauguin arrive in Arles?\",\n", " vector_top_k=10,\n", " reranker_top_n=3,\n", - " with_reranker=False,\n", - " with_retrieval=False,\n", - " step_size=10,\n", - " model=\"zephyr\",\n", - " gpt_model=\"gpt-3.5-turbo\",\n", - "):\n", - " query_bundle = QueryBundle(query_str)\n", - " # configure retriever\n", - " retriever = VectorIndexRetriever(\n", - " index=index,\n", - " similarity_top_k=vector_top_k,\n", - " )\n", - " retrieved_nodes = retriever.retrieve(query_bundle)\n", - "\n", - " retrieved_nodes.reverse()\n", - "\n", - " if with_reranker:\n", - " # configure reranker\n", - " reranker = RankLLMRerank(\n", - " top_n=reranker_top_n,\n", - " model=model,\n", - " with_retrieval=with_retrieval,\n", - " gpt_model=gpt_model,\n", - " )\n", - " retrieved_nodes = reranker.postprocess_nodes(\n", - " retrieved_nodes, query_bundle\n", - " )\n", - "\n", - " return retrieved_nodes" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### After retrieving the top 50 results and reversing the order, the correct result is ranked 47th/50.\n", - "\n", - "### Expected result:\n", - "```After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. 
Gauguin depicted Van Gogh in his The Painter of Sunflowers;```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", - "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
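In outline, the deleted `get_retrieved_nodes_mixed` helper above implements that stress test by reversing the retrieved list before reranking, so the sliding window has to carry the relevant chunk up from the bottom of the candidate list. A condensed sketch, reusing the names from the cells above:

```python
# Sliding-window stress test: reverse the top-50 candidates so the correct
# chunk starts near rank 47/50, then let the reranker's sliding window
# promote it back to the top.
retrieved_nodes = retriever.retrieve(query_bundle)
retrieved_nodes.reverse()  # worst-case ordering for the reranker

reranker = RankLLMRerank(model="rank_zephyr", top_n=3, window_size=15)
reranked_nodes = reranker.postprocess_nodes(retrieved_nodes, query_bundle)
```

The deleted helper also exposed a `step_size` (10 in the old cell), which controls how far the window advances between passes; smaller steps give more overlap between consecutive windows at the cost of extra LLM calls.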
#  | Score    | Text
0  | 0.752651 | == Nazi-looted art ==

During the Nazi period (1933–1945) a great number of artworks by Van Gogh changed hands, many of them looted from Jewish collectors who were forced into exile or murdered. Some of these works have disappeared into private collections. Others have since resurfaced in museums, or at auction, or have been reclaimed, often in high-profile lawsuits, by their former owners. The German Lost Art Foundation still lists dozens of missing van Goghs and the American Alliance of Museums lists 73 van Goghs on the Nazi Era Provenance Internet Portal.


== References ==


=== Explanatory footnotes ===


=== Citations ===


=== General and cited sources ===


== External links ==

The Vincent van Gogh Gallery, the complete works and letters of Van Gogh
Vincent van Gogh The letters, the complete letters of Van Gogh (translated into English and annotated)
Vincent van Gogh, teaching resource on Van Gogh
Works by Vincent van Gogh at Project Gutenberg
Works by or about Vincent van Gogh at Internet Archive
Works by Vincent van Gogh at LibriVox (public domain audiobooks)
Vincent van Gogh at IMDb
1  | 0.765492 | Yellow meant the most to him, because it symbolised emotional truth. He used yellow as a symbol for sunlight, life, and God. Van Gogh strove to be a painter of rural life and nature; during his first summer in Arles he used his new palette to paint landscapes and traditional rural life. His belief that a power existed behind the natural led him to try to capture a sense of that power, or the essence of nature in his art, sometimes through the use of symbols. His renditions of the sower, at first copied from Jean-François Millet, reflect the influence of Thomas Carlyle and Friedrich Nietzsche's thoughts on the heroism of physical labour, as well as Van Gogh's religious beliefs: the sower as Christ sowing life beneath the hot sun. These were themes and motifs he returned to often to rework and develop. His paintings of flowers are filled with symbolism, but rather than use traditional Christian iconography he made up his own, where life is lived under the sun and work is an allegory of life. In Arles, having gained confidence after painting spring blossoms and learning to capture bright sunlight, he was ready to paint The Sower.
Van Gogh stayed within what he called the \"guise of reality\" and was critical of overly stylised works. He wrote afterwards that the abstraction of Starry Night had gone too far and that reality had \"receded too far in the background\". Hughes describes it as a moment of extreme visionary ecstasy: the stars are in a great whirl, reminiscent of Hokusai's Great Wave, the movement in the heaven above is reflected by the movement of the cypress on the earth below, and the painter's vision is \"translated into a thick, emphatic plasma of paint\".Between 1885 and his death in 1890, Van Gogh appears to have been building an oeuvre, a collection that reflected his personal vision and could be commercially successful. He was influenced by Blanc's definition of style, that a true painting required optimal use of colour, perspective and brushstrokes. Van Gogh applied the word \"purposeful\" to paintings he thought he had mastered, as opposed to those he thought of as studies.
2  | 0.768011 | His legacy is honored and celebrated by the Van Gogh Museum in Amsterdam, which holds the world's largest collection of his paintings and drawings.


== Letters ==

The most comprehensive primary source on Van Gogh is his correspondence with his younger brother, Theo. Their lifelong friendship, and most of what is known of Vincent's thoughts and theories of art, are recorded in the hundreds of letters they exchanged from 1872 until 1890. Theo van Gogh was an art dealer and provided his brother with financial and emotional support as well as access to influential people on the contemporary art scene.Theo kept all of Vincent's letters to him; but Vincent kept only a few of the letters he received. After both had died, Theo's widow Jo Bonger-van Gogh arranged for the publication of some of their letters. A few appeared in 1906 and 1913; the majority were published in 1914. Vincent's letters are eloquent and expressive, have been described as having a \"diary-like intimacy\", and read in parts like autobiography. Translator Arnold Pomerans wrote that their publication adds a \"fresh dimension to the understanding of Van Gogh's artistic achievement, an understanding granted to us by virtually no other painter\".

There are more than 600 letters from Vincent to Theo and around 40 from Theo to Vincent. There are 22 to his sister Wil, 58 to the painter Anthon van Rappard, 22 to Émile Bernard as well as individual letters to Paul Signac, Paul Gauguin, and the critic Albert Aurier. Some are illustrated with sketches. Many are undated, but art historians have been able to place most in chronological order. Problems in transcription and dating remain, mainly with those posted from Arles. While there, Vincent wrote around 200 letters in Dutch, French, and English. There is a gap in the record when he lived in Paris as the brothers lived together and had no need to correspond.The highly paid contemporary artist Jules Breton was frequently mentioned in Vincent's letters. In 1875 letters to Theo, Vincent mentions he saw Breton, discusses the Breton paintings he saw at a Salon, and discusses sending one of Breton's books but only on the condition that it be returned.
3  | 0.769008 | ==== Self-portraits ====

Van Gogh created more than 43 self-portraits between 1885 and 1889. They were usually completed in series, such as those painted in Paris in mid-1887, and continued until shortly before his death. Generally the portraits were studies, created during periods when he was reluctant to mix with others or when he lacked models and painted himself.Van Gogh's self-portraits reflect a high degree of self-scrutiny. Often they were intended to mark important periods in his life; for example, the mid-1887 Paris series were painted at the point where he became aware of Claude Monet, Paul Cézanne and Signac. In Self-Portrait with Grey Felt Hat, heavy strains of paint spread outwards across the canvas. It is one of his most renowned self-portraits of that period, \"with its highly organized rhythmic brushstrokes, and the novel halo derived from the Neo-impressionist repertoire was what Van Gogh himself called a 'purposeful' canvas\".They contain a wide array of physiognomical representations. Van Gogh's mental and physical condition is usually apparent; he may appear unkempt, unshaven or with a neglected beard, with deeply sunken eyes, a weak jaw, or having lost teeth. Some show him with full lips, a long face or prominent skull, or sharpened, alert features. His hair is sometimes depicted in a vibrant reddish hue and at other times ash colored.Van Gogh's self-portraits vary stylistically. In those painted after December 1888, the strong contrast of vivid colors highlight the haggard pallor of his skin. Some depict the artist with a beard, others without. He can be seen with bandages in portraits executed just after he mutilated his ear. In only a few does he depict himself as a painter. Those painted in Saint-Rémy show the head from the right, the side opposite his damaged ear, as he painted himself reflected in his mirror.
4  | 0.769796 | Sien gave her daughter to her mother and baby Willem to her brother. Willem remembered visiting Rotterdam when he was about 12, when an uncle tried to persuade Sien to marry to legitimise the child. He believed Van Gogh was his father, but the timing of his birth makes this unlikely. Sien drowned herself in the River Scheldt in 1904. In September 1883, Van Gogh moved to Drenthe in the northern Netherlands. In December, driven by loneliness, he went to live with his parents, then in Nuenen, North Brabant.


=== Emerging artist ===


==== Nuenen and Antwerp (1883–1886) ====

In Nuenen, Van Gogh focused on painting and drawing. Working outside and very quickly, he completed sketches and paintings of weavers and their cottages. Van Gogh also completed The Parsonage Garden at Nuenen, which was stolen from the Singer Laren in March 2020. From August 1884, Margot Begemann, a neighbour's daughter ten years his senior, joined him on his forays; she fell in love and he reciprocated, though less enthusiastically. They wanted to marry, but neither side of their families were in favor. Margot was distraught and took an overdose of strychnine, but survived after Van Gogh rushed her to a nearby hospital. On 26 March 1885, his father died of a heart attack.Van Gogh painted several groups of still lifes in 1885. During his two-year stay in Nuenen, he completed numerous drawings and watercolours and nearly 200 oil paintings. His palette consisted mainly of sombre earth tones, particularly dark brown, and showed no sign of the vivid colours that distinguished his later work.There was interest from a dealer in Paris early in 1885. Theo asked Vincent if he had paintings ready to exhibit. In May, Van Gogh responded with his first major work, The Potato Eaters, and a series of \"peasant character studies\" which were the culmination of several years of work.
5  | 0.770298 | ==== Cypresses and olives ====

Fifteen canvases depict cypresses, a tree he became fascinated with in Arles. He brought life to the trees, which were traditionally seen as emblematic of death. The series of cypresses he began in Arles featured the trees in the distance, as windbreaks in fields; when he was at Saint-Rémy he brought them to the foreground. Vincent wrote to Theo in May 1889: \"Cypresses still preoccupy me, I should like to do something with them like my canvases of sunflowers\"; he went on to say, \"They are beautiful in line and proportion like an Egyptian obelisk.\"In mid-1889, and at his sister Wil's request, Van Gogh painted several smaller versions of Wheat Field with Cypresses. The works are characterised by swirls and densely painted impasto, and include The Starry Night, in which cypresses dominate the foreground. In addition to this, other notable works on cypresses include Cypresses (1889), Cypresses with Two Figures (1889–90), and Road with Cypress and Star (1890).During the last six or seven months of the year 1889, he had also created at least fifteen paintings of olive trees, a subject which he considered as demanding and compelling. Among these works are Olive Trees with the Alpilles in the Background (1889), about which in a letter to his brother Van Gogh wrote, \"At last I have a landscape with olives\". While in Saint-Rémy, Van Gogh spent time outside the asylum, where he painted trees in the olive groves. In these works, natural life is rendered as gnarled and arthritic as if a personification of the natural world, which are, according to Hughes, filled with \"a continuous field of energy of which nature is a manifestation\".
6  | 0.771924 | He wrote afterwards that the abstraction of Starry Night had gone too far and that reality had \"receded too far in the background\". Hughes describes it as a moment of extreme visionary ecstasy: the stars are in a great whirl, reminiscent of Hokusai's Great Wave, the movement in the heaven above is reflected by the movement of the cypress on the earth below, and the painter's vision is \"translated into a thick, emphatic plasma of paint\". Between 1885 and his death in 1890, Van Gogh appears to have been building an oeuvre, a collection that reflected his personal vision and could be commercially successful. He was influenced by Blanc's definition of style, that a true painting required optimal use of colour, perspective and brushstrokes. Van Gogh applied the word \"purposeful\" to paintings he thought he had mastered, as opposed to those he thought of as studies. He painted many series of studies; most of which were still lifes, many executed as colour experiments or as gifts to friends. The work in Arles contributed considerably to his oeuvre: those he thought the most important from that time were The Sower, Night Cafe, Memory of the Garden in Etten and Starry Night. With their broad brushstrokes, inventive perspectives, colours, contours and designs, these paintings represent the style he sought.


=== Major series ===

Van Gogh's stylistic developments are usually linked to the periods he spent living in different places across Europe. He was inclined to immerse himself in local cultures and lighting conditions, although he maintained a highly individual visual outlook throughout. His evolution as an artist was slow and he was aware of his painterly limitations. Van Gogh moved home often, perhaps to expose himself to new visual stimuli, and through exposure develop his technical skill. Art historian Melissa McQuillan believes the moves also reflect later stylistic changes and that Van Gogh used the moves to avoid conflict, and as a coping mechanism for when the idealistic artist was faced with the realities of his then current situation.
7  | 0.772942 | Among these was Two Peasant Women Digging in a Snow-Covered Field at Sunset. Hulsker believes that this small group of paintings formed the nucleus of many drawings and study sheets depicting landscapes and figures that Van Gogh worked on during this time. He comments that this short period was the only time that Van Gogh's illness had a significant effect on his work. Van Gogh asked his mother and his brother to send him drawings and rough work he had done in the early 1880s so he could work on new paintings from his old sketches. Belonging to this period is Sorrowing Old Man (\"At Eternity's Gate\"), a colour study Hulsker describes as \"another unmistakable remembrance of times long past\". His late paintings show an artist at the height of his abilities, according to the art critic Robert Hughes, \"longing for concision and grace\". After the birth of his nephew, Van Gogh wrote, \"I started right away to make a picture for him, to hang in their bedroom, branches of white almond blossom against a blue sky.\"
8  | 0.774411 | He moved to Nuenen after a short period of time in Drenthe and began work on several large paintings but destroyed most of them. The Potato Eaters and its companion pieces are the only ones to have survived. Following a visit to the Rijksmuseum, Van Gogh wrote of his admiration for the quick, economical brushwork of the Dutch Masters, especially Rembrandt and Frans Hals. He was aware many of his faults were due to lack of experience and technical expertise, so in November 1885 he travelled to Antwerp and later Paris to learn and develop his skills.
Theo criticised The Potato Eaters for its dark palette, which he thought unsuitable for a modern style. During Van Gogh's stay in Paris between 1886 and 1887, he tried to master a new, lighter palette. His Portrait of Père Tanguy (1887) shows his success with the brighter palette and is evidence of an evolving personal style. Charles Blanc's treatise on colour interested him greatly and led him to work with complementary colours. Van Gogh came to believe that the effect of colour went beyond the descriptive; he said that \"colour expresses something in itself\". According to Hughes, Van Gogh perceived colour as having a \"psychological and moral weight\", as exemplified in the garish reds and greens of The Night Café, a work he wanted to \"express the terrible passions of humanity\". Yellow meant the most to him, because it symbolised emotional truth. He used yellow as a symbol for sunlight, life, and God.Van Gogh strove to be a painter of rural life and nature; during his first summer in Arles he used his new palette to paint landscapes and traditional rural life. His belief that a power existed behind the natural led him to try to capture a sense of that power, or the essence of nature in his art, sometimes through the use of symbols. His renditions of the sower, at first copied from Jean-François Millet, reflect the influence of Thomas Carlyle and Friedrich Nietzsche's thoughts on the heroism of physical labour, as well as Van Gogh's religious beliefs: the sower as Christ sowing life beneath the hot sun. These were themes and motifs he returned to often to rework and develop.
9  | 0.774471 | With their broad brushstrokes, inventive perspectives, colours, contours and designs, these paintings represent the style he sought.


=== Major series ===

Van Gogh's stylistic developments are usually linked to the periods he spent living in different places across Europe. He was inclined to immerse himself in local cultures and lighting conditions, although he maintained a highly individual visual outlook throughout. His evolution as an artist was slow and he was aware of his painterly limitations. Van Gogh moved home often, perhaps to expose himself to new visual stimuli, and through exposure develop his technical skill. Art historian Melissa McQuillan believes the moves also reflect later stylistic changes and that Van Gogh used the moves to avoid conflict, and as a coping mechanism for when the idealistic artist was faced with the realities of his then current situation.


==== Portraits ====

Van Gogh said portraiture was his greatest interest. \"What I'm most passionate about, much much more than all the rest in my profession\", he wrote in 1890, \"is the portrait, the modern portrait.\" It is \"the only thing in painting that moves me deeply and that gives me a sense of the infinite.\" He wrote to his sister that he wished to paint portraits that would endure, and that he would use colour to capture their emotions and character rather than aiming for photographic realism. Those closest to Van Gogh are mostly absent from his portraits; he rarely painted Theo, Van Rappard or Bernard. The portraits of his mother were from photographs.Van Gogh painted Arles' postmaster Joseph Roulin and his family repeatedly. In five versions of La Berceuse (The Lullaby), Van Gogh painted Augustine Roulin quietly holding a rope that rocks the unseen cradle of her infant daughter. Van Gogh had planned for it to be the central image of a triptych, flanked by paintings of sunflowers.
10 | 0.776510 | These began a compelling mythology of Van Gogh as an intense and dedicated painter who suffered for his art and died young. In 1934, the novelist Irving Stone wrote a biographical novel of Van Gogh's life titled Lust for Life, based on Van Gogh's letters to Theo. This novel and the 1956 film further enhanced his fame, especially in the United States where Stone surmised only a few hundred people had heard of Van Gogh prior to his surprise best-selling book. In 1957, Francis Bacon based a series of paintings on reproductions of Van Gogh's The Painter on the Road to Tarascon, the original of which was destroyed during the Second World War. Bacon was inspired by an image he described as \"haunting\", and regarded Van Gogh as an alienated outsider, a position which resonated with him. Bacon identified with Van Gogh's theories of art and quoted lines written to Theo: \"[R]eal painters do not paint things as they are ... [T]hey paint them as they themselves feel them to be.\" Van Gogh's works are among the world's most expensive paintings. Those sold for over US$100 million (today's equivalent) include Portrait of Dr Gachet, Portrait of Joseph Roulin and Irises. The Metropolitan Museum of Art acquired a copy of Wheat Field with Cypresses in 1993 for US$57 million by using funds donated by publisher, diplomat and philanthropist Walter Annenberg. In 2015, L'Allée des Alyscamps sold for US$66.3 million at Sotheby's, New York, exceeding its reserve of US$40 million. Minor planet 4457 van Gogh is named in his honour. In October 2022, two activists protesting the effects of the fossil fuel industry on climate change threw a can of tomato soup on Van Gogh's Sunflowers in the National Gallery, London, and then glued their hands to the gallery wall. As the painting was covered by glass it was not damaged.
11 | 0.776510 | ==== Flowers ====

Van Gogh painted several landscapes with flowers, including roses, lilacs, irises, and sunflowers. Some reflect his interests in the language of colour, and also in Japanese ukiyo-e. There are two series of dying sunflowers. The first was painted in Paris in 1887 and shows flowers lying on the ground. The second set was completed a year later in Arles and is of bouquets in a vase positioned in early morning light. Both are built from thickly layered paintwork, which, according to the London National Gallery, evoke the \"texture of the seed-heads\".In these series, Van Gogh was not preoccupied by his usual interest in filling his paintings with subjectivity and emotion; rather, the two series are intended to display his technical skill and working methods to Gauguin, who was about to visit. The 1888 paintings were created during a rare period of optimism for the artist. Vincent wrote to Theo in August 1888:

I'm painting with the gusto of a Marseillais eating bouillabaisse, which won't surprise you when it's a question of painting large sunflowers ... If I carry out this plan there'll be a dozen or so panels. The whole thing will therefore be a symphony in blue and yellow. I work on it all these mornings, from sunrise. Because the flowers wilt quickly and it's a matter of doing the whole thing in one go.
The sunflowers were painted to decorate the walls in anticipation of Gauguin's visit, and Van Gogh placed individual works around the Yellow House's guest room in Arles. Gauguin was deeply impressed and later acquired two of the Paris versions. After Gauguin's departure, Van Gogh imagined the two major versions of the sunflowers as wings of the Berceuse Triptych, and included them in his Les XX in Brussels exhibit. Today the major pieces of the series are among his best known, celebrated for the sickly connotations of the colour yellow and its tie-in with the Yellow House, the expressionism of the brush strokes, and their contrast against often dark backgrounds.
12 | 0.778443 | His father was the youngest son of a minister. The two met when Anna's younger sister, Cornelia, married Theodorus's older brother Vincent (Cent). Van Gogh's parents married in May 1851 and moved to Zundert. His brother Theo was born on 1 May 1857. There was another brother, Cor, and three sisters: Elisabeth, Anna, and Willemina (known as \"Wil\"). In later life, Van Gogh remained in touch only with Willemina and Theo. Theodorus's salary as a minister was modest, but the Church also supplied the family with a house, a maid, two cooks, a gardener, a carriage and horse; his mother Anna instilled in the children a duty to uphold the family's high social position. Van Gogh was a serious and thoughtful child. He was taught at home by his mother and a governess, and in 1860, was sent to the village school. In 1864, he was placed in a boarding school at Zevenbergen, where he felt abandoned, and he campaigned to come home. Instead, in 1866, his parents sent him to the middle school in Tilburg, where he was also deeply unhappy. His interest in art began at a young age. He was encouraged to draw as a child by his mother, and his early drawings are expressive, but do not approach the intensity of his later work. Constant Cornelis Huijsmans, who had been a successful artist in Paris, taught the students at Tilburg. His philosophy was to reject technique in favour of capturing the impressions of things, particularly nature or common objects. Van Gogh's profound unhappiness seems to have overshadowed the lessons, which had little effect. In March 1868, he abruptly returned home. He later wrote that his youth was \"austere and cold, and sterile\". In July 1869, Van Gogh's uncle Cent obtained a position for him at the art dealers Goupil & Cie in The Hague. After completing his training in 1873, he was transferred to Goupil's London branch on Southampton Street, and took lodgings at 87 Hackford Road, Stockwell.
13 | 0.778511 | === Death ===

On 27 July 1890, aged 37, Van Gogh shot himself in the chest with a revolver. The shooting may have taken place in the wheat field in which he had been painting, or in a local barn. The bullet was deflected by a rib and passed through his chest without doing apparent damage to internal organs – possibly stopped by his spine. He was able to walk back to the Auberge Ravoux, where he was attended to by two doctors. One of them, Dr Gachet, served as a war surgeon in 1870 and had extensive knowledge of gunshots. Vincent was possibly attended to during the night by Dr Gachet's son Paul Louis Gachet and the innkeeper, Arthur Ravoux. The following morning, Theo rushed to his brother's side, finding him in good spirits. But within hours Vincent's health began to fail, suffering from an infection resulting from the wound. He died in the early hours of 29 July. According to Theo, Vincent's last words were: \"The sadness will last forever\".
Van Gogh was buried on 30 July, in the municipal cemetery of Auvers-sur-Oise. The funeral was attended by Theo van Gogh, Andries Bonger, Charles Laval, Lucien Pissarro, Émile Bernard, Julien Tanguy and Paul Gachet, among twenty family members, friends and locals. Theo suffered from syphilis, and his health began to decline further after his brother's death. Weak and unable to come to terms with Vincent's absence, he died on 25 January 1891 at Den Dolder and was buried in Utrecht. In 1914, Johanna van Gogh-Bonger had Theo's body exhumed and moved from Utrecht to be re-buried alongside Vincent's at Auvers-sur-Oise.There have been numerous debates as to the nature of Van Gogh's illness and its effect on his work, and many retrospective diagnoses have been proposed. The consensus is that Van Gogh had an episodic condition with periods of normal functioning. Perry was the first to suggest bipolar disorder in 1947, and this has been supported by the psychiatrists Hemphill and Blumer.
14 | 0.780200 | He was encouraged to draw as a child by his mother, and his early drawings are expressive, but do not approach the intensity of his later work. Constant Cornelis Huijsmans, who had been a successful artist in Paris, taught the students at Tilburg. His philosophy was to reject technique in favour of capturing the impressions of things, particularly nature or common objects. Van Gogh's profound unhappiness seems to have overshadowed the lessons, which had little effect. In March 1868, he abruptly returned home. He later wrote that his youth was \"austere and cold, and sterile\". In July 1869, Van Gogh's uncle Cent obtained a position for him at the art dealers Goupil & Cie in The Hague. After completing his training in 1873, he was transferred to Goupil's London branch on Southampton Street, and took lodgings at 87 Hackford Road, Stockwell. This was a happy time for Van Gogh; he was successful at work and, at 20, was earning more than his father. Theo's wife, Jo Van Gogh-Bonger, later remarked that this was the best year of Vincent's life. He became infatuated with his landlady's daughter, Eugénie Loyer, but she rejected him after he confessed his feelings; she was secretly engaged to a former lodger. He grew more isolated and religiously fervent. His father and uncle arranged a transfer to Paris in 1875, where he became resentful of issues such as the degree to which the art dealers commodified art, and he was dismissed a year later.
In April 1876, he returned to England to take unpaid work as a supply teacher in a small boarding school in Ramsgate. When the proprietor moved to Isleworth in Middlesex, Van Gogh went with him. The arrangement was not successful; he left to become a Methodist minister's assistant. His parents had meanwhile moved to Etten; in 1876 he returned home at Christmas for six months and took work at a bookshop in Dordrecht. He was unhappy in the position, and spent his time doodling or translating passages from the Bible into English, French, and German.
15 | 0.780329 | Influenced by Van Gogh, Matisse abandoned his earth-coloured palette for bright colours. In Paris in 1901, a large Van Gogh retrospective was held at the Bernheim-Jeune Gallery, which excited André Derain and Maurice de Vlaminck, and contributed to the emergence of Fauvism. Important group exhibitions took place with the Sonderbund artists in Cologne in 1912, the Armory Show, New York in 1913, and Berlin in 1914. Henk Bremmer was instrumental in teaching and talking about Van Gogh, and introduced Helene Kröller-Müller to Van Gogh's art; she became an avid collector of his work. The early figures in German Expressionism such as Emil Nolde acknowledged a debt to Van Gogh's work. Bremmer assisted Jacob Baart de la Faille, whose catalogue raisonné L'Oeuvre de Vincent van Gogh appeared in 1928.
Van Gogh's fame reached its first peak in Austria and Germany before World War I, helped by the publication of his letters in three volumes in 1914. His letters are expressive and literate, and have been described as among the foremost 19th-century writings of their kind. These began a compelling mythology of Van Gogh as an intense and dedicated painter who suffered for his art and died young. In 1934, the novelist Irving Stone wrote a biographical novel of Van Gogh's life titled Lust for Life, based on Van Gogh's letters to Theo. This novel and the 1956 film further enhanced his fame, especially in the United States where Stone surmised only a few hundred people had heard of Van Gogh prior to his surprise best-selling book.In 1957, Francis Bacon based a series of paintings on reproductions of Van Gogh's The Painter on the Road to Tarascon, the original of which was destroyed during the Second World War. Bacon was inspired by an image he described as \"haunting\", and regarded Van Gogh as an alienated outsider, a position which resonated with him.
16 | 0.780921 | Theo suffered from syphilis, and his health began to decline further after his brother's death. Weak and unable to come to terms with Vincent's absence, he died on 25 January 1891 at Den Dolder and was buried in Utrecht. In 1914, Johanna van Gogh-Bonger had Theo's body exhumed and moved from Utrecht to be re-buried alongside Vincent's at Auvers-sur-Oise. There have been numerous debates as to the nature of Van Gogh's illness and its effect on his work, and many retrospective diagnoses have been proposed. The consensus is that Van Gogh had an episodic condition with periods of normal functioning. Perry was the first to suggest bipolar disorder in 1947, and this has been supported by the psychiatrists Hemphill and Blumer. Biochemist Wilfred Arnold has countered that the symptoms are more consistent with acute intermittent porphyria, noting that the popular link between bipolar disorder and creativity might be spurious. Temporal lobe epilepsy with bouts of depression has also been suggested. Whatever the diagnosis, his condition was likely worsened by malnutrition, overwork, insomnia and alcohol.


== Style and works ==


=== Artistic development ===
Van Gogh drew and painted with watercolours while at school, but only a few examples survive and the authorship of some has been challenged. When he took up art as an adult, he began at an elementary level. In early 1882, his uncle, Cornelis Marinus, owner of a well-known gallery of contemporary art in Amsterdam, asked for drawings of The Hague. Van Gogh's work did not live up to expectations. Marinus offered a second commission, specifying the subject matter in detail, but was again disappointed with the result. Van Gogh persevered; he experimented with lighting in his studio using variable shutters and different drawing materials. For more than a year he worked on single figures – highly elaborate studies in black and white, which at the time gained him only criticism. Later, they were recognised as early masterpieces.In August 1882, Theo gave Vincent money to buy materials for working en plein air. Vincent wrote that he could now \"go on painting with new vigour\".
Those sold for over US$100 million (today's equivalent) include Portrait of Dr Gachet, Portrait of Joseph Roulin and Irises. The Metropolitan Museum of Art acquired a copy of Wheat Field with Cypresses in 1993 for US$57 million, using funds donated by publisher, diplomat and philanthropist Walter Annenberg. In 2015, L'Allée des Alyscamps sold for US$66.3 million at Sotheby's, New York, exceeding its reserve of US$40 million. Minor planet 4457 van Gogh is named in his honour. In October 2022, two activists protesting the effects of the fossil fuel industry on climate change threw a can of tomato soup on Van Gogh's Sunflowers in the National Gallery, London, and then glued their hands to the gallery wall. As the painting was covered by glass, it was not damaged.


=== Van Gogh Museum ===

Van Gogh's nephew and namesake, Vincent Willem van Gogh (1890–1978), inherited the estate after his mother's death in 1925. During the early 1950s he arranged for the publication of a complete edition of the letters presented in four volumes and several languages. He then began negotiations with the Dutch government to subsidise a foundation to purchase and house the entire collection. Theo's son participated in planning the project in the hope that the works would be exhibited under the best possible conditions. The project began in 1963; architect Gerrit Rietveld was commissioned to design it, and after his death in 1964 Kisho Kurokawa took charge. Work progressed throughout the 1960s, with 1972 as the target for its grand opening. The Van Gogh Museum opened in the Museumplein in Amsterdam in 1973. It became the second most popular museum in the Netherlands, after the Rijksmuseum, regularly receiving more than 1.5 million visitors a year. In 2015 it had a record 1.9 million. Eighty-five percent of the visitors come from other countries.
They wanted to marry, but neither family was in favor. Margot was distraught and took an overdose of strychnine, but survived after Van Gogh rushed her to a nearby hospital. On 26 March 1885, his father died of a heart attack. Van Gogh painted several groups of still lifes in 1885. During his two-year stay in Nuenen, he completed numerous drawings and watercolours and nearly 200 oil paintings. His palette consisted mainly of sombre earth tones, particularly dark brown, and showed no sign of the vivid colours that distinguished his later work. There was interest from a dealer in Paris early in 1885. Theo asked Vincent if he had paintings ready to exhibit. In May, Van Gogh responded with his first major work, The Potato Eaters, and a series of "peasant character studies" which were the culmination of several years of work. When he complained that Theo was not making enough effort to sell his paintings in Paris, his brother responded that they were too dark and not in keeping with the bright style of Impressionism. In August his work was publicly exhibited for the first time, in the shop windows of the dealer Leurs in The Hague. One of his young peasant sitters became pregnant in September 1885; Van Gogh was accused of forcing himself upon her, and the village priest forbade parishioners to model for him.

He moved to Antwerp that November and rented a room above a paint dealer's shop in the rue des Images (Lange Beeldekensstraat). He lived in poverty and ate poorly, preferring to spend the money Theo sent on painting materials and models. Bread, coffee and tobacco became his staple diet. In February 1886, he wrote to Theo that he could only remember eating six hot meals since the previous May. His teeth became loose and painful. In Antwerp he applied himself to the study of colour theory and spent time in museums—particularly studying the work of Peter Paul Rubens—and broadened his palette to include carmine, cobalt blue and emerald green.
Vincent Willem van Gogh (Dutch: [ˈvɪnsɛnt ˈʋɪləɱ‿vɑŋ‿ˈɣɔx]; 30 March 1853 – 29 July 1890) was a Dutch Post-Impressionist painter who is among the most famous and influential figures in the history of Western art. In just over a decade, he created approximately 2100 artworks, including around 860 oil paintings, most of them in the last two years of his life. His oeuvre includes landscapes, still lifes, portraits, and self-portraits, most of which are characterized by bold colors and dramatic brushwork that contributed to the rise of expressionism in modern art. Van Gogh's work was beginning to gain critical attention before he died at age 37, by what was suspected at the time to be a suicide. During his lifetime, only one of Van Gogh's paintings, The Red Vineyard, was sold.
Born into an upper-middle-class family, Van Gogh drew as a child and was serious, quiet and thoughtful, but showed signs of mental instability. As a young man, he worked as an art dealer, often travelling, but became depressed after he was transferred to London. He turned to religion and spent time as a missionary in southern Belgium. Later he drifted into ill-health and solitude. He was keenly aware of modernist trends in art and, while back with his parents, took up painting in 1881. His younger brother, Theo, supported him financially, and the two of them maintained a long correspondence.
Van Gogh's early works consist of mostly still lifes and depictions of peasant laborers. In 1886, he moved to Paris, where he met members of the artistic avant-garde, including Émile Bernard and Paul Gauguin, who were seeking new paths beyond Impressionism. Frustrated in Paris and inspired by a growing spirit of artistic change and collaboration, in February 1888, Van Gogh moved to Arles in southern France to establish an artistic retreat and commune. Once there, Van Gogh's art changed. His paintings grew brighter and he turned his attention to the natural world, depicting local olive groves, wheat fields and sunflowers. Van Gogh invited Gauguin to join him in Arles and eagerly anticipated Gauguin's arrival in the fall of 1888.
Van Gogh suffered from psychotic episodes and delusions. Though he worried about his mental stability, he often neglected his physical health, did not eat properly and drank heavily. His friendship with Gauguin ended after a confrontation with a razor when, in a rage, he severed his left ear. Van Gogh spent time in psychiatric hospitals, including a period at Saint-Rémy. After he discharged himself and moved to the Auberge Ravoux in Auvers-sur-Oise near Paris, he came under the care of the homeopathic doctor Paul Gachet. His depression persisted, and on 27 July 1890, Van Gogh is believed to have shot himself in the chest with a revolver, dying from his injuries two days later.
Van Gogh's work began to attract critical artistic attention in the last year of his life. After his death, Van Gogh's art and life story captured public imagination as an emblem of misunderstood genius, due in large part to the efforts of his widowed sister-in-law Johanna van Gogh-Bonger. His bold use of color, expressive line and thick application of paint inspired avant-garde artistic groups like the Fauves and German Expressionists in the early 20th century. Van Gogh's work gained widespread critical and commercial success in the following decades, and he has become a lasting icon of the romantic ideal of the tortured artist. Today, Van Gogh's works are among the world's most expensive paintings ever sold. His legacy is honored and celebrated by the Van Gogh Museum in Amsterdam, which holds the world's largest collection of his paintings and drawings.
==== Wheat fields ====

Van Gogh made several painting excursions during visits to the landscape around Arles. He made paintings of harvests, wheat fields and other rural landmarks of the area, including The Old Mill (1888), a good example of a picturesque structure bordering the wheat fields beyond. At various points, Van Gogh painted the view from his window – at The Hague, Antwerp, and Paris. These works culminated in The Wheat Field series, which depicted the view from his cells in the asylum at Saint-Rémy. Many of the late paintings are sombre but essentially optimistic and, right up to the time of Van Gogh's death, reflect his desire to return to lucid mental health. Yet some of his final works reflect his deepening concerns. Writing in July 1890, from Auvers, Van Gogh said that he had become absorbed "in the immense plain against the hills, boundless as the sea, delicate yellow". Van Gogh was captivated by the fields in May when the wheat was young and green. His Wheatfields at Auvers with White House shows a more subdued palette of yellows and blues, which creates a sense of idyllic harmony. About 10 July 1890, Van Gogh wrote to Theo of "vast fields of wheat under troubled skies". Wheatfield with Crows shows the artist's state of mind in his final days; Hulsker describes the work as a "doom-filled painting with threatening skies and ill-omened crows". Its dark palette and heavy brushstrokes convey a sense of menace.



== Reputation and legacy ==

After Van Gogh's first exhibitions in the late 1880s, his reputation grew steadily among artists, art critics, dealers and collectors. In 1887, André Antoine hung Van Gogh's work alongside that of Georges Seurat and Paul Signac at the Théâtre Libre in Paris; some were acquired by Julien Tanguy. In 1889, his work was described in the journal Le Moderniste Illustré by Albert Aurier as characterised by "fire, intensity, sunshine". Ten paintings were shown at the Société des Artistes Indépendants in Brussels in January 1890. French president Marie François Sadi Carnot was said to have been impressed by Van Gogh's work. After Van Gogh's death, memorial exhibitions were held in Brussels, Paris, The Hague and Antwerp. His work was shown in several high-profile exhibitions, including six works at Les XX; in 1891 there was a retrospective exhibition in Brussels. In 1892, Octave Mirbeau wrote that Van Gogh's suicide was an "infinitely sadder loss for art ... even though the populace has not crowded to a magnificent funeral, and poor Vincent van Gogh, whose demise means the extinction of a beautiful flame of genius, has gone to his death as obscure and neglected as he lived." Theo died in January 1891, removing Vincent's most vocal and well-connected champion. Theo's widow Johanna van Gogh-Bonger was a Dutchwoman in her twenties who had not known either her husband or her brother-in-law very long and who suddenly had to take care of several hundred paintings, letters and drawings, as well as her infant son, Vincent Willem van Gogh. Gauguin was not inclined to offer assistance in promoting Van Gogh's reputation, and Johanna's brother Andries Bonger also seemed lukewarm about his work. Aurier, one of Van Gogh's earliest supporters among the critics, died of typhoid fever in 1892 at the age of 27.


=== Emerging artist ===


==== Nuenen and Antwerp (1883–1886) ====

In Nuenen, Van Gogh focused on painting and drawing. Working outside and very quickly, he completed sketches and paintings of weavers and their cottages.


== Letters ==

The most comprehensive primary source on Van Gogh is his correspondence with his younger brother, Theo. Their lifelong friendship, and most of what is known of Vincent's thoughts and theories of art, are recorded in the hundreds of letters they exchanged from 1872 until 1890. Theo van Gogh was an art dealer and provided his brother with financial and emotional support as well as access to influential people on the contemporary art scene.Theo kept all of Vincent's letters to him; but Vincent kept only a few of the letters he received. After both had died, Theo's widow Jo Bonger-van Gogh arranged for the publication of some of their letters. A few appeared in 1906 and 1913; the majority were published in 1914.
There are 22 to his sister Wil, 58 to the painter Anthon van Rappard, 22 to Émile Bernard as well as individual letters to Paul Signac, Paul Gauguin, and the critic Albert Aurier. Some are illustrated with sketches. Many are undated, but art historians have been able to place most in chronological order. Problems in transcription and dating remain, mainly with those posted from Arles. While there, Vincent wrote around 200 letters in Dutch, French, and English. There is a gap in the record when he lived in Paris as the brothers lived together and had no need to correspond.
The highly paid contemporary artist Jules Breton was frequently mentioned in Vincent's letters. In 1875 letters to Theo, Vincent mentions he saw Breton, discusses the Breton paintings he saw at a Salon, and discusses sending one of Breton's books but only on the condition that it be returned. In a March 1884 letter to Rappard he discusses one of Breton's poems that had inspired one of his paintings. In 1885 he describes Breton's famous work The Song of the Lark as being "fine". In March 1880, roughly midway between these letters, Van Gogh set out on an 80-kilometre trip on foot to meet with Breton in the village of Courrières; however, he was intimidated by Breton's success and/or the high wall around his estate. He turned around and returned without making his presence known. It appears Breton was unaware of Van Gogh or his attempted visit. There are no known letters between the two artists and Van Gogh is not one of the contemporary artists discussed by Breton in his 1891 autobiography Life of an Artist.


== Life ==


=== Early years ===

Vincent Willem van Gogh was born on 30 March 1853 in Groot-Zundert, in the predominantly Catholic province of North Brabant in the Netherlands. He was the oldest surviving child of Theodorus van Gogh (1822–1885), a minister of the Dutch Reformed Church, and his wife, Anna Cornelia Carbentus (1819–1907). Van Gogh was given the name of his grandfather and of a brother stillborn exactly a year before his birth. Vincent was a common name in the Van Gogh family: it had been borne by his grandfather, Vincent (1789–1874), a theology graduate of the University of Leiden in 1811. This Vincent had six sons, three of whom became art dealers, and may have been named after his great-uncle, a sculptor (1729–1802). Van Gogh's mother came from a prosperous family in The Hague. His father was the youngest son of a minister. The two met when Anna's younger sister, Cornelia, married Theodorus's older brother Vincent (Cent). Van Gogh's parents married in May 1851 and moved to Zundert. His brother Theo was born on 1 May 1857. There was another brother, Cor, and three sisters: Elisabeth, Anna, and Willemina (known as "Wil"). In later life, Van Gogh remained in touch only with Willemina and Theo. Theodorus's salary as a minister was modest, but the Church also supplied the family with a house, a maid, two cooks, a gardener, a carriage and horse; his mother Anna instilled in the children a duty to uphold the family's high social position. Van Gogh was a serious and thoughtful child.
The physician was not fond of the painting and used it to repair a chicken coop, then gave it away. In 2016, the portrait was housed at the Pushkin Museum of Fine Arts and estimated to be worth over $50 million.



==== Saint-Rémy (May 1889 – May 1890) ====

Van Gogh entered the Saint-Paul-de-Mausole asylum on 8 May 1889, accompanied by his caregiver, Frédéric Salles, a Protestant clergyman. Saint-Paul was a former monastery in Saint-Rémy, located less than 30 kilometres (19 mi) from Arles, and it was run by a former naval doctor, Théophile Peyron. Van Gogh had two cells with barred windows, one of which he used as a studio. The clinic and its garden became the main subjects of his paintings. He made several studies of the hospital's interiors, such as Vestibule of the Asylum and Saint-Rémy (September 1889), and its gardens, such as Lilacs (May 1889). Some of his works from this time are characterised by swirls, such as The Starry Night. He was allowed short supervised walks, during which time he painted cypresses and olive trees, including Valley with Ploughman Seen from Above, Olive Trees with the Alpilles in the Background (1889), Cypresses (1889), Cornfield with Cypresses (1889) and Country Road in Provence by Night (1890). In September 1889, he produced two further versions of Bedroom in Arles and The Gardener. Limited access to life outside the clinic resulted in a shortage of subject matter. Van Gogh instead worked on interpretations of other artists' paintings, such as Millet's The Sower and Noonday Rest, and variations on his own earlier work. Van Gogh was an admirer of the Realism of Jules Breton, Gustave Courbet and Millet, and he compared his copies to a musician's interpreting Beethoven. His Prisoners' Round (after Gustave Doré) (1890) was painted after an engraving by Gustave Doré (1832–1883). Tralbaut suggests that the face of the prisoner in the centre of the painting looking towards the viewer is Van Gogh himself; Jan Hulsker discounts this.
Between February and April 1890, Van Gogh suffered a severe relapse. Depressed and unable to bring himself to write, he was still able to paint and draw a little during this time, and he later wrote to Theo that he had made a few small canvases "from memory ... reminisces of the North". Among these was Two Peasant Women Digging in a Snow-Covered Field at Sunset. Hulsker believes that this small group of paintings formed the nucleus of many drawings and study sheets depicting landscapes and figures that Van Gogh worked on during this time. He comments that this short period was the only time that Van Gogh's illness had a significant effect on his work. Van Gogh asked his mother and his brother to send him drawings and rough work he had done in the early 1880s so he could work on new paintings from his old sketches. Belonging to this period is Sorrowing Old Man ("At Eternity's Gate"), a colour study Hulsker describes as "another unmistakable remembrance of times long past". His late paintings show an artist at the height of his abilities, according to the art critic Robert Hughes, "longing for concision and grace". After the birth of his nephew, Van Gogh wrote, "I started right away to make a picture for him, to hang in their bedroom, branches of white almond blossom against a blue sky."

Van Gogh bought Japanese ukiyo-e woodcuts in the docklands of Antwerp, later incorporating elements of their style into the background of some of his paintings. He was drinking heavily again, and was hospitalised between February and March 1886, when he was possibly also treated for syphilis.

After his recovery, despite his antipathy towards academic teaching, he took the higher-level admission exams at the Academy of Fine Arts in Antwerp and, in January 1886, matriculated in painting and drawing. He became ill and run down by overwork, poor diet and excessive smoking. He started to attend drawing classes after plaster models at the Antwerp Academy on 18 January 1886. He quickly got into trouble with Charles Verlat, the director of the academy and teacher of a painting class, because of his unconventional painting style. Van Gogh had also clashed with the instructor of the drawing class Franz Vinck. Van Gogh finally started to attend the drawing classes after antique plaster models given by Eugène Siberdt. Soon Siberdt and Van Gogh came into conflict when the latter did not comply with Siberdt's requirement that drawings express the contour and concentrate on the line. When Van Gogh was required to draw the Venus de Milo during a drawing class, he produced the limbless, naked torso of a Flemish peasant woman. Siberdt regarded this as defiance against his artistic guidance and made corrections to Van Gogh's drawing with his crayon so vigorously that he tore the paper. Van Gogh then flew into a violent rage and shouted at Siberdt: 'You clearly do not know what a young woman is like, God damn it! A woman must have hips, buttocks, a pelvis in which she can carry a baby!' According to some accounts, this was the last time Van Gogh attended classes at the academy; he left later for Paris. On 31 March 1886, about a month after the confrontation with Siberdt, the teachers of the academy decided that 17 students, including Van Gogh, had to repeat a year. The story that Van Gogh was expelled from the academy by Siberdt is therefore unfounded.
He grew more isolated and religiously fervent. His father and uncle arranged a transfer to Paris in 1875, where he became resentful of issues such as the degree to which the art dealers commodified art, and he was dismissed a year later.
In April 1876, he returned to England to take unpaid work as a supply teacher in a small boarding school in Ramsgate. When the proprietor moved to Isleworth in Middlesex, Van Gogh went with him. The arrangement was not successful; he left to become a Methodist minister's assistant. His parents had meanwhile moved to Etten; in 1876 he returned home at Christmas for six months and took work at a bookshop in Dordrecht. He was unhappy in the position, and spent his time doodling or translating passages from the Bible into English, French, and German. He immersed himself in Christianity and became increasingly pious and monastic. According to his flatmate of the time, Paulus van Görlitz, Van Gogh ate frugally, avoiding meat. To support his religious conviction and his desire to become a pastor, in 1877, the family sent him to live with his uncle Johannes Stricker, a respected theologian, in Amsterdam. Van Gogh prepared for the University of Amsterdam theology entrance examination; he failed the exam and left his uncle's house in July 1878. He undertook, but also failed, a three-month course at a Protestant missionary school in Laken, near Brussels. In January 1879, he took up a post as a missionary at Petit-Wasmes in the working-class, coal-mining district of Borinage in Belgium. To show support for his impoverished congregation, he gave up his comfortable lodgings at a bakery to a homeless person and moved to a small hut, where he slept on straw. His humble living conditions did not endear him to church authorities, who dismissed him for "undermining the dignity of the priesthood". He then walked the 75 kilometres (47 mi) to Brussels, returned briefly to Cuesmes in the Borinage, but gave in to pressure from his parents to return home to Etten. He stayed there until around March 1880, which caused concern and frustration for his parents. His father was especially frustrated and advised that his son be committed to the lunatic asylum in Geel.
Van Gogh returned to Cuesmes in August 1880, where he lodged with a miner until October. He became interested in the people and scenes around him, and he recorded them in drawings after Theo's suggestion that he take up art in earnest. He traveled to Brussels later in the year, to follow Theo's recommendation that he study with the Dutch artist Willem Roelofs, who persuaded him – in spite of his dislike of formal schools of art – to attend the Académie Royale des Beaux-Arts. He registered at the Académie in November 1880, where he studied anatomy and the standard rules of modelling and perspective.
In 1892, Émile Bernard organised a small solo show of Van Gogh's paintings in Paris, and Julien Tanguy exhibited his Van Gogh paintings with several consigned from Johanna van Gogh-Bonger. In April 1894, the Durand-Ruel Gallery in Paris agreed to take 10 paintings on consignment from Van Gogh's estate. In 1896, the Fauvist painter Henri Matisse, then an unknown art student, visited John Russell on Belle Île off Brittany. Russell had been a close friend of Van Gogh; he introduced Matisse to the Dutchman's work, and gave him a Van Gogh drawing.
==== Orchards ====

The Flowering Orchards (also the Orchards in Blossom) are among the first groups of work completed after Van Gogh's arrival in Arles in February 1888. The 14 paintings are optimistic, joyous and visually expressive of the burgeoning spring. They are delicately sensitive and unpopulated. He painted swiftly, and although he brought to this series a version of Impressionism, a strong sense of personal style began to emerge during this period. The transience of the blossoming trees, and the passing of the season, seemed to align with his sense of impermanence and belief in a new beginning in Arles. During the blossoming of the trees that spring, he found "a world of motifs that could not have been more Japanese". Vincent wrote to Theo on 21 April 1888 that he had 10 orchards and "one big [painting] of a cherry tree, which I've spoiled". During this period Van Gogh mastered the use of light by subjugating shadows and painting the trees as if they are the source of light – almost in a sacred manner. Early the following year he painted another smaller group of orchards, including View of Arles, Flowering Orchards. Van Gogh was enthralled by the landscape and vegetation of the south of France, and often visited the farm gardens near Arles. In the vivid light of the Mediterranean climate his palette significantly brightened.



==== 1890 Exhibitions and recognition ====
See also Vincent van Gogh's display at Les XX, 1890
Albert Aurier praised his work in the Mercure de France in January 1890 and described him as "a genius". In February, Van Gogh painted five versions of L'Arlésienne (Madame Ginoux), based on a charcoal sketch Gauguin had produced when she sat for both artists in November 1888. Also in February, Van Gogh was invited by Les XX, a society of avant-garde painters in Brussels, to participate in their annual exhibition. At the opening dinner a Les XX member, Henry de Groux, insulted Van Gogh's work. Toulouse-Lautrec demanded satisfaction, and Signac declared he would continue to fight for Van Gogh's honour if Lautrec surrendered. De Groux apologised for the slight and left the group. From 20 March to 27 April 1890, Van Gogh was included in the sixth exhibition of the Société des Artistes Indépendants in the Pavillon de la Ville de Paris on the Champs-Elysées. Van Gogh exhibited ten paintings. While the exhibition was on display with the Artistes Indépendants in Paris, Claude Monet said that Van Gogh's work was the best in the show.


=== Etten, Drenthe and The Hague ===

Van Gogh returned to Etten in April 1881 for an extended stay with his parents. He continued to draw, often using his neighbours as subjects. In August 1881, his recently widowed cousin, Cornelia "Kee" Vos-Stricker, daughter of his mother's older sister Willemina and Johannes Stricker, arrived for a visit. He was thrilled and took long walks with her. Kee was seven years older than he was and had an eight-year-old son. Van Gogh surprised everyone by declaring his love to her and proposing marriage. She refused with the words "No, nay, never" ("nooit, neen, nimmer"). After Kee returned to Amsterdam, Van Gogh went to The Hague to try to sell paintings and to meet with his second cousin, Anton Mauve. Mauve was the successful artist Van Gogh longed to be. Mauve invited him to return in a few months and suggested he spend the intervening time working in charcoal and pastels; Van Gogh returned to Etten and followed this advice.
Late in November 1881, Van Gogh wrote a letter to Johannes Stricker, one which he described to Theo as an attack. Within days he left for Amsterdam. Kee would not meet him, and her parents wrote that his "persistence is disgusting". In despair, he held his left hand in the flame of a lamp, with the words: "Let me see her for as long as I can keep my hand in the flame." He did not recall the event well, but later assumed that his uncle had blown out the flame. Kee's father made it clear that her refusal should be heeded and that the two would not marry, largely because of Van Gogh's inability to support himself.
Mauve took Van Gogh on as a student and introduced him to watercolour, which he worked on for the next month before returning home for Christmas. He quarrelled with his father, refusing to attend church, and left for The Hague. In January 1882, Mauve introduced him to painting in oil and lent him money to set up a studio. Within a month Van Gogh and Mauve fell out, possibly over the viability of drawing from plaster casts. Van Gogh could afford to hire only people from the street as models, a practice of which Mauve seems to have disapproved. In June, Van Gogh suffered a bout of gonorrhoea and spent three weeks in hospital. Soon after, he first painted in oils, bought with money borrowed from Theo. He liked the medium, and he spread the paint liberally, scraping from the canvas and working back with the brush. He wrote that he was surprised at how good the results were.
By March 1882, Mauve appeared to have gone cold towards Van Gogh and stopped replying to his letters. He had learned of Van Gogh's new domestic arrangement with an alcoholic prostitute, Clasina Maria "Sien" Hoornik (1850–1904), and her young daughter. Van Gogh had met Sien towards the end of January 1882, when she had a five-year-old daughter and was pregnant. She had previously borne two children who died, but Van Gogh was unaware of this. On 2 July, she gave birth to a baby boy, Willem. When Van Gogh's father discovered the details of their relationship, he put pressure on his son to abandon Sien and her two children. Vincent at first defied him, and considered moving the family out of the city, but in late 1883, he left Sien and the children. Poverty may have pushed Sien back into prostitution; the home became less happy and Van Gogh may have felt family life was irreconcilable with his artistic development. Sien gave her daughter to her mother and baby Willem to her brother. Willem remembered visiting Rotterdam when he was about 12, when an uncle tried to persuade Sien to marry to legitimise the child. He believed Van Gogh was his father, but the timing of his birth makes this unlikely. Sien drowned herself in the River Scheldt in 1904.
In September 1883, Van Gogh moved to Drenthe in the northern Netherlands. In December, driven by loneliness, he went to live with his parents, then in Nuenen, North Brabant.


==== Paris (1886–1888) ====

Van Gogh moved to Paris in March 1886 where he shared Theo's rue Laval apartment in Montmartre and studied at Fernand Cormon's studio. In June the brothers took a larger flat at 54 rue Lepic. In Paris, Vincent painted portraits of friends and acquaintances, still life paintings, views of Le Moulin de la Galette, scenes in Montmartre, Asnières and along the Seine. In 1885 in Antwerp he had become interested in Japanese ukiyo-e woodblock prints and had used them to decorate the walls of his studio; while in Paris he collected hundreds of them. He tried his hand at Japonaiserie, tracing a figure from a reproduction on the cover of the magazine Paris Illustre, The Courtesan or Oiran (1887), after Keisai Eisen, which he then graphically enlarged in a painting. After seeing the portrait of Adolphe Monticelli at the Galerie Delareybarette, Van Gogh adopted a brighter palette and a bolder attack, particularly in paintings such as his Seascape at Saintes-Maries (1888). Two years later, Vincent and Theo paid for the publication of a book on Monticelli paintings, and Vincent bought some of Monticelli's works to add to his collection.
Van Gogh learned about Fernand Cormon's atelier from Theo. He worked at the studio in April and May 1886, where he frequented the circle of the Australian artist John Russell, who painted his portrait in 1886. Van Gogh also met fellow students Émile Bernard, Louis Anquetin and Henri de Toulouse-Lautrec – who painted a portrait of him in pastel. They met at Julien "Père" Tanguy's paint shop, which was at that time the only place where Paul Cézanne's paintings were displayed. In 1886, two large exhibitions were staged there, showing Pointillism and Neo-impressionism for the first time and bringing attention to Georges Seurat and Paul Signac. Theo kept a stock of Impressionist paintings in his gallery on boulevard Montmartre, but Van Gogh was slow to acknowledge the new developments in art.
Conflicts arose between the brothers. At the end of 1886 Theo found living with Vincent to be "almost unbearable". By early 1887, they were again at peace, and Vincent had moved to Asnières, a northwestern suburb of Paris, where he got to know Signac. He adopted elements of Pointillism, a technique in which a multitude of small coloured dots are applied to the canvas so that when seen from a distance they create an optical blend of hues. The style stresses the ability of complementary colours – including blue and orange – to form vibrant contrasts.
While in Asnières Van Gogh painted parks, restaurants and the Seine, including Bridges across the Seine at Asnières. In November 1887, Theo and Vincent befriended Paul Gauguin who had just arrived in Paris. Towards the end of the year, Vincent arranged an exhibition alongside Bernard, Anquetin, and probably Toulouse-Lautrec, at the Grand-Bouillon Restaurant du Chalet, 43 avenue de Clichy, Montmartre. In a contemporary account, Bernard wrote that the exhibition was ahead of anything else in Paris. There, Bernard and Anquetin sold their first paintings, and Van Gogh exchanged work with Gauguin. Discussions on art, artists, and their social situations started during this exhibition, continued and expanded to include visitors to the show, like Camille Pissarro and his son Lucien, Signac and Seurat. In February 1888, feeling worn out from life in Paris, Van Gogh left, having painted more than 200 paintings during his two years there. Hours before his departure, accompanied by Theo, he paid his first and only visit to Seurat in his studio.


==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of Van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex, and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that Van Gogh followed him after he left for a walk and "rushed towards me, an open razor in his hand." This account is uncorroborated; Gauguin was almost certainly absent from the Yellow House that night, most likely staying in a hotel. After an altercation on the evening of 23 December 1888, Van Gogh returned to his room, where he seemingly heard voices and either wholly or in part severed his left ear with a razor, causing severe bleeding. He bandaged the wound, wrapped the ear in paper and delivered the package to a woman at a brothel Van Gogh and Gauguin both frequented. Van Gogh was found unconscious the next morning by a policeman and taken to hospital, where he was treated by Félix Rey, a young doctor still in training. The ear was brought to the hospital, but Rey did not attempt to reattach it as too much time had passed. Van Gogh researcher and art historian Bernadette Murphy discovered the true identity of the woman, named Gabrielle, who died in Arles at the age of 80 in 1952, and whose descendants still lived (as of 2020) just outside Arles. Gabrielle, known in her youth as "Gaby," was a 17-year-old cleaning girl at the brothel and other local establishments at the time Van Gogh presented her with his ear. Van Gogh had no recollection of the event, suggesting that he may have suffered an acute mental breakdown. The hospital diagnosis was "acute mania with generalised delirium", and within a few days, the local police ordered that he be placed in hospital care.
Gauguin immediately notified Theo, who, on 24 December, had proposed marriage to his old friend Andries Bonger's sister Johanna. That evening, Theo rushed to the station to board a night train to Arles. He arrived on Christmas Day and comforted Vincent, who seemed to be semi-lucid. That evening, he left Arles for the return trip to Paris. During the first days of his treatment, Van Gogh repeatedly and unsuccessfully asked for Gauguin, who asked a policeman attending the case to "be kind enough, Monsieur, to awaken this man with great care, and if he asks for me tell him I have left for Paris; the sight of me might prove fatal for him." Gauguin fled Arles, never to see Van Gogh again. They continued to correspond, and in 1890, Gauguin proposed they form a studio in Antwerp. Meanwhile, other visitors to the hospital included Marie Ginoux and Roulin. Despite a pessimistic diagnosis, Van Gogh recovered and returned to the Yellow House on 7 January 1889. He spent the following month between hospital and home, suffering from hallucinations and delusions of poisoning. In March, the police closed his house after a petition by 30 townspeople (including the Ginoux family) who described him as le fou roux ("the redheaded madman"); Van Gogh returned to hospital.


==== Auvers-sur-Oise (May–July 1890) ====

In May 1890, Van Gogh left the clinic in Saint-Rémy to move nearer to both Dr Paul Gachet in the Paris suburb of Auvers-sur-Oise and to Theo. Gachet was an amateur painter and had treated several other artists – Camille Pissarro had recommended him. Van Gogh's first impression was that Gachet was "iller than I am, it seemed to me, or let's say just as much." The painter Charles Daubigny moved to Auvers in 1861 and in turn drew other artists there, including Camille Corot and Honoré Daumier. In July 1890, Van Gogh completed two paintings of Daubigny's Garden, one of which is likely his final work.
During his last weeks at Saint-Rémy, his thoughts returned to "memories of the North", and several of the approximately 70 oils, painted during as many days in Auvers-sur-Oise, are reminiscent of northern scenes. In June 1890, he painted several portraits of his doctor, including Portrait of Dr Gachet, and his only etching. In each the emphasis is on Gachet's melancholic disposition. There are other paintings which are probably unfinished, including Thatched Cottages by a Hill. In July, Van Gogh wrote that he had become absorbed "in the immense plain against the hills, boundless as the sea, delicate yellow". He had first become captivated by the fields in May, when the wheat was young and green. In July, he described to Theo "vast fields of wheat under turbulent skies". He wrote that they represented his "sadness and extreme loneliness" and that the "canvases will tell you what I cannot say in words, that is, how healthy and invigorating I find the countryside". Wheatfield with Crows, although not his last oil work, is from July 1890 and Hulsker discusses it as being associated with "melancholy and extreme loneliness". Hulsker identifies seven oil paintings from Auvers that follow the completion of Wheatfield with Crows. Hulsker also expressed concern about the number of paintings attributed to Van Gogh from the period.


=== Artistic breakthrough ===


==== Arles (1888–89) ====

Ill from drink and suffering from smoker's cough, in February 1888, Van Gogh sought refuge in Arles. He seems to have moved with thoughts of founding an art colony. The Danish artist Christian Mourier-Petersen was his companion for two months and at first, Arles appeared exotic to Van Gogh. In a letter, he described it as a foreign country: "The Zouaves, the brothels, the adorable little Arlésienne going to her First Communion, the priest in his surplice, who looks like a dangerous rhinoceros, the people drinking absinthe, all seem to me creatures from another world." The time in Arles was one of Van Gogh's more prolific periods: he completed 200 paintings and more than 100 drawings and watercolors. He was energized by the local countryside and light; his works from this period are rich in yellow, ultramarine and mauve. They include harvests, wheat fields and general rural landmarks from the area, including The Old Mill (1888), one of seven canvases sent to Pont-Aven on 4 October 1888 in an exchange of works with Paul Gauguin, Émile Bernard, Charles Laval and others.
In March 1888, Van Gogh created landscapes using a gridded "perspective frame", and three of those works were shown at the annual exhibition of the Société des Artistes Indépendants. In April, he was visited by the American artist Dodge MacKnight, who was living nearby at Fontvieille. On 1 May 1888, Van Gogh signed a lease for four rooms in the Yellow House. The house at 2 place Lamartine cost 15 francs per month. The rooms were unfurnished and had been uninhabited for months. Because the Yellow House had to be furnished before he could fully move in, Van Gogh moved from the Hôtel Carrel to the Café de la Gare on 7 May 1888.
He had befriended the Yellow House's proprietors, Joseph and Marie Ginoux, and was able to use it as a studio. Van Gogh wanted a gallery to display his work and started a series of paintings that eventually included Van Gogh's Chair (1888), Bedroom in Arles (1888), The Night Café (1888), Café Terrace at Night (September 1888), Starry Night Over the Rhone (1888), and Still Life: Vase with Twelve Sunflowers (1888), all intended for the decoration for the Yellow House. Van Gogh wrote that with The Night Café he tried "to express the idea that the café is a place where one can ruin oneself, go mad, or commit a crime". When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.



==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. "In the hope of living in a studio of our own with Gauguin," he wrote in a letter to Theo, "I'd like to do a decoration for the studio. Nothing but large Sunflowers."
46 | 0.838895 | Hours before his departure, accompanied by Theo, he paid his first and only visit to Seurat in his studio.


=== Artistic breakthrough ===


==== Arles (1888–89) ====

Ill from drink and suffering from smoker's cough, in February 1888, Van Gogh sought refuge in Arles. He seems to have moved with thoughts of founding an art colony. The Danish artist Christian Mourier-Petersen was his companion for two months and at first, Arles appeared exotic to Van Gogh. In a letter, he described it as a foreign country: "The Zouaves, the brothels, the adorable little Arlésienne going to her First Communion, the priest in his surplice, who looks like a dangerous rhinoceros, the people drinking absinthe, all seem to me creatures from another world." The time in Arles was one of Van Gogh's more prolific periods: he completed 200 paintings and more than 100 drawings and watercolors. He was energized by the local countryside and light; his works from this period are rich in yellow, ultramarine and mauve. They include harvests, wheat fields and general rural landmarks from the area, including The Old Mill (1888), one of seven canvases sent to Pont-Aven on 4 October 1888 in an exchange of works with Paul Gauguin, Émile Bernard, Charles Laval and others. In March 1888, Van Gogh created landscapes using a gridded "perspective frame" and three of those works were shown at the annual exhibition of the Société des Artistes Indépendants. In April, he was visited by the American artist Dodge MacKnight, who was living nearby at Fontvieille. On 1 May 1888, Van Gogh signed a lease for four rooms in the Yellow House. The house at 2 place Lamartine cost 15 francs per month. The rooms were unfurnished and had been uninhabited for months. Because the Yellow House had to be furnished before he could fully move in, Van Gogh moved from the Hôtel Carrel to the Café de la Gare on 7 May 1888.
47 | 0.848972 | When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.



==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. "In the hope of living in a studio of our own with Gauguin," he wrote in a letter to Theo, "I'd like to do a decoration for the studio. Nothing but large Sunflowers." When Boch visited again, Van Gogh painted a portrait of him, as well as the study The Poet Against a Starry Sky. In preparation for Gauguin's visit, Van Gogh bought two beds on advice from the station's postal supervisor Joseph Roulin, whose portrait he painted. On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair. After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these "imaginative" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps.
48 | 0.849399 | On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair. After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these "imaginative" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps. The single painting Gauguin completed during his visit was his portrait of Van Gogh. Van Gogh and Gauguin visited Montpellier in December 1888, where they saw works by Courbet and Delacroix in the Musée Fabre. Their relationship began to deteriorate; Van Gogh admired Gauguin and wanted to be treated as his equal, but Gauguin was arrogant and domineering, which frustrated Van Gogh. They often quarrelled; Van Gogh increasingly feared that Gauguin was going to desert him, and the situation, which Van Gogh described as one of "excessive tension", rapidly headed towards crisis point.



==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that Van Gogh followed him after he left for a walk and "rushed towards me, an open razor in his hand."
49 | 0.851540 | Gauguin fled Arles, never to see Van Gogh again. They continued to correspond, and in 1890, Gauguin proposed they form a studio in Antwerp. Meanwhile, other visitors to the hospital included Marie Ginoux and Roulin. Despite a pessimistic diagnosis, Van Gogh recovered and returned to the Yellow House on 7 January 1889. He spent the following month between hospital and home, suffering from hallucinations and delusions of poisoning. In March, the police closed his house after a petition by 30 townspeople (including the Ginoux family) who described him as le fou roux ("the redheaded madman"); Van Gogh returned to hospital. Paul Signac visited him twice in March; in April, Van Gogh moved into rooms owned by Dr Rey after floods damaged paintings in his own home. Two months later, he left Arles and voluntarily entered an asylum in Saint-Rémy-de-Provence. Around this time, he wrote, "Sometimes moods of indescribable anguish, sometimes moments when the veil of time and fatality of circumstances seemed to be torn apart for an instant." Van Gogh gave his 1889 Portrait of Doctor Félix Rey to Dr Rey. The physician was not fond of the painting and used it to repair a chicken coop, then gave it away. In 2016, the portrait was housed at the Pushkin Museum of Fine Arts and estimated to be worth over $50 million.



==== Saint-Rémy (May 1889 – May 1890) ====

Van Gogh entered the Saint-Paul-de-Mausole asylum on 8 May 1889, accompanied by his caregiver, Frédéric Salles, a Protestant clergyman. Saint-Paul was a former monastery in Saint-Rémy, located less than 30 kilometres (19 mi) from Arles, and it was run by a former naval doctor, Théophile Peyron. Van Gogh had two cells with barred windows, one of which he used as a studio. The clinic and its garden became the main subjects of his paintings.
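Each row above pairs a retrieval rank and similarity score with the text of the retrieved node. The helper that produces these listings follows a standard retrieve-then-rerank pattern; a minimal sketch, assuming a `VectorStoreIndex` named `index` built earlier in the notebook and the `RankLLMRerank` postprocessor from `llama-index-postprocessor-rankllm-rerank` (argument names may vary across versions):

```python
from llama_index.core import QueryBundle
from llama_index.core.retrievers import VectorIndexRetriever
from llama_index.postprocessor.rankllm_rerank import RankLLMRerank


def get_retrieved_nodes_sketch(
    query_str,
    vector_top_k=10,
    reranker_top_n=3,
    with_reranker=False,
    model="zephyr",
):
    # NOTE: `index` is assumed to be a VectorStoreIndex built earlier in
    # the notebook; this sketch only shows the retrieve-then-rerank flow.
    query_bundle = QueryBundle(query_str)

    # Stage 1: dense retrieval returns vector_top_k candidates with
    # similarity scores (the score column in the listings above).
    retriever = VectorIndexRetriever(index=index, similarity_top_k=vector_top_k)
    retrieved_nodes = retriever.retrieve(query_bundle)

    if with_reranker:
        # Stage 2: an LLM-based reranker reorders the candidates and
        # keeps the reranker_top_n best.
        reranker = RankLLMRerank(model=model, top_n=reranker_top_n)
        retrieved_nodes = reranker.postprocess_nodes(retrieved_nodes, query_bundle)

    return retrieved_nodes
```

For the RankZephyr run below, 50 candidates are more than a listwise reranker can score in a single prompt, so the reranker slides a window of 20 passages from the bottom of the candidate list towards the top in steps of 10; passages in the overlap carry over between windows, letting a relevant node buried deep in the list climb into the top ranks. A minimal sketch of that control flow, with `rerank_window` as a hypothetical stand-in for the actual model call:

```python
def sliding_window_rerank(nodes, rerank_window, window_size=20, step_size=10):
    # Rerank a long candidate list with a model that only sees
    # window_size passages at a time (RankZephyr-style listwise reranking).
    nodes = list(nodes)
    end = len(nodes)
    while end > 0:
        start = max(0, end - window_size)
        # rerank_window reorders the slice best-first; good passages in the
        # overlap region carry over into the next (higher) window.
        nodes[start:end] = rerank_window(nodes[start:end])
        if start == 0:
            break
        end -= step_size
    return nodes
```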
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "new_nodes = get_retrieved_nodes_mixed(\n", - " \"Which date did Paul Gauguin arrive in Arles?\",\n", - " vector_top_k=50,\n", - " with_reranker=False,\n", + " with_reranker=True,\n", + " model=\"rank_vicuna\",\n", ")\n", "\n", - "visualize_retrieved_nodes(new_nodes)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Retrieve and Rerank reversed top 50 results using RankZephyr and return top 3\n", - "\n", - "The sliding window size is 20, with a step size of 10." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n", - "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "2c4dcb9f943a4895a338039e11e85b04", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Loading checkpoint shards: 0%| | 0/3 [00:00\n", - " \n", - " \n", - " \n", - " Score\n", - " Text\n", - " \n", - " \n", - " \n", - " \n", - " 0\n", - " 0.849399\n", - " On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair.After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these \"imaginative\" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps. The single painting Gauguin completed during his visit was his portrait of Van Gogh.Van Gogh and Gauguin visited Montpellier in December 1888, where they saw works by Courbet and Delacroix in the Musée Fabre. Their relationship began to deteriorate; Van Gogh admired Gauguin and wanted to be treated as his equal, but Gauguin was arrogant and domineering, which frustrated Van Gogh. They often quarrelled; Van Gogh increasingly feared that Gauguin was going to desert him, and the situation, which Van Gogh described as one of \"excessive tension\", rapidly headed towards crisis point.



==== Hospital in Arles (December 1888) ====

The exact sequence that led to the mutilation of van Gogh's ear is not known. Gauguin said, fifteen years later, that the night followed several instances of physically threatening behaviour. Their relationship was complex and Theo may have owed money to Gauguin, who suspected the brothers were exploiting him financially. It seems likely that Vincent realised that Gauguin was planning to leave. The following days saw heavy rain, leading to the two men being shut in the Yellow House. Gauguin recalled that Van Gogh followed him after he left for a walk and "rushed towards me, an open razor in his hand."

1 | 0.848972 | When he visited Saintes-Maries-de-la-Mer in June, he gave lessons to a Zouave second lieutenant – Paul-Eugène Milliet – and painted boats on the sea and the village. MacKnight introduced Van Gogh to Eugène Boch, a Belgian painter who sometimes stayed in Fontvieille, and the two exchanged visits in July.



==== Gauguin's visit (1888) ====

When Gauguin agreed to visit Arles in 1888, Van Gogh hoped for friendship and to realize his idea of an artists' collective. Van Gogh prepared for Gauguin's arrival by painting four versions of Sunflowers in one week. "In the hope of living in a studio of our own with Gauguin," he wrote in a letter to Theo, "I'd like to do a decoration for the studio. Nothing but large Sunflowers." When Boch visited again, Van Gogh painted a portrait of him, as well as the study The Poet Against a Starry Sky. In preparation for Gauguin's visit, Van Gogh bought two beds on advice from the station's postal supervisor Joseph Roulin, whose portrait he painted. On 17 September, he spent his first night in the still sparsely furnished Yellow House. When Gauguin consented to work and live in Arles with him, Van Gogh started to work on the Décoration for the Yellow House, probably the most ambitious effort he ever undertook. He completed two chair paintings: Van Gogh's Chair and Gauguin's Chair. After much pleading from Van Gogh, Gauguin arrived in Arles on 23 October and, in November, the two painted together. Gauguin depicted Van Gogh in his The Painter of Sunflowers; Van Gogh painted pictures from memory, following Gauguin's suggestion. Among these "imaginative" paintings is Memory of the Garden at Etten. Their first joint outdoor venture was at the Alyscamps, when they produced the pendants Les Alyscamps.

2 | 0.804749 | He turned to religion and spent time as a missionary in southern Belgium. Later he drifted into ill-health and solitude. He was keenly aware of modernist trends in art and, while back with his parents, took up painting in 1881. His younger brother, Theo, supported him financially, and the two of them maintained a long correspondence.
Van Gogh's early works consist of mostly still lifes and depictions of peasant laborers. In 1886, he moved to Paris, where he met members of the artistic avant-garde, including Émile Bernard and Paul Gauguin, who were seeking new paths beyond Impressionism. Frustrated in Paris and inspired by a growing spirit of artistic change and collaboration, in February 1888, Van Gogh moved to Arles in southern France to establish an artistic retreat and commune. Once there, Van Gogh's art changed. His paintings grew brighter and he turned his attention to the natural world, depicting local olive groves, wheat fields and sunflowers. Van Gogh invited Gauguin to join him in Arles and eagerly anticipated Gauguin's arrival in the fall of 1888.
Van Gogh suffered from psychotic episodes and delusions. Though he worried about his mental stability, he often neglected his physical health, did not eat properly and drank heavily. His friendship with Gauguin ended after a confrontation with a razor when, in a rage, he severed his left ear. Van Gogh spent time in psychiatric hospitals, including a period at Saint-Rémy. After he discharged himself and moved to the Auberge Ravoux in Auvers-sur-Oise near Paris, he came under the care of the homeopathic doctor Paul Gachet. His depression persisted, and on 27 July 1890, Van Gogh is believed to have shot himself in the chest with a revolver, dying from his injuries two days later.
Van Gogh's work began to attract critical artistic attention in the last year of his life. After his death, Van Gogh's art and life story captured public imagination as an emblem of misunderstood genius, due in large part to the efforts of his widowed sister-in-law Johanna van Gogh-Bonger.\n", - " \n", - " \n", - "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "new_nodes = get_retrieved_nodes_mixed(\n", + "# Using RankGPT\n", + "new_nodes = get_retrieved_nodes(\n", " \"Which date did Paul Gauguin arrive in Arles?\",\n", - " vector_top_k=50,\n", + " vector_top_k=10,\n", " reranker_top_n=3,\n", " with_reranker=True,\n", - " with_retrieval=False,\n", - " model=\"zephyr\",\n", - " step_size=10,\n", + " model=\"gpt-3.5-turbo\",\n", ")\n", "\n", "visualize_retrieved_nodes(new_nodes)" @@ -1006,15 +435,18 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### The correct result is ranked 1st/50 after RankZephyr rerank. " + "#### For other models, use `model=`\n", + "- `monot5` for MonoT5 pointwise reranker\n", + "- `castorini/LiT5-Distill-base` for LiT5 distill reranker\n", + "- `castorini/LiT5-Score-base` for LiT5 score reranker" ] } ], "metadata": { "kernelspec": { - "display_name": "rankllm", + "display_name": "main", "language": "python", - "name": "rankllm" + "name": "python3" }, "language_info": { "codemirror_mode": { diff --git a/docs/docs/examples/objects/object_index.ipynb b/docs/docs/examples/objects/object_index.ipynb index e3b99bd326dcd..0934b30a78558 100644 --- a/docs/docs/examples/objects/object_index.ipynb +++ b/docs/docs/examples/objects/object_index.ipynb @@ -5,7 +5,7 @@ "id": "2f43c205", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/AimCallback.ipynb b/docs/docs/examples/observability/AimCallback.ipynb index 22f55128f826f..3b6708a4a36a1 100644 --- a/docs/docs/examples/observability/AimCallback.ipynb +++ b/docs/docs/examples/observability/AimCallback.ipynb @@ -6,7 +6,7 @@ "id": "b4698442", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/HoneyHiveLlamaIndexTracer.ipynb b/docs/docs/examples/observability/HoneyHiveLlamaIndexTracer.ipynb index fe0277c2fee11..c71797b0883f2 100644 --- a/docs/docs/examples/observability/HoneyHiveLlamaIndexTracer.ipynb +++ b/docs/docs/examples/observability/HoneyHiveLlamaIndexTracer.ipynb @@ -6,7 +6,7 @@ "id": "13d2b729", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/LangfuseCallbackHandler.ipynb b/docs/docs/examples/observability/LangfuseCallbackHandler.ipynb index 539c16e6e00b5..fcf331708f49a 100644 --- a/docs/docs/examples/observability/LangfuseCallbackHandler.ipynb +++ b/docs/docs/examples/observability/LangfuseCallbackHandler.ipynb @@ -6,7 +6,7 @@ "id": "d6509c3a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/LlamaDebugHandler.ipynb b/docs/docs/examples/observability/LlamaDebugHandler.ipynb index 322244d026fe8..00a11606c37d4 100644 --- a/docs/docs/examples/observability/LlamaDebugHandler.ipynb +++ b/docs/docs/examples/observability/LlamaDebugHandler.ipynb @@ -5,7 +5,7 @@ "id": "dcab9129", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/OpenInferenceCallback.ipynb b/docs/docs/examples/observability/OpenInferenceCallback.ipynb index 94457e4f48bde..82cdc19490c3e 100644 --- 
a/docs/docs/examples/observability/OpenInferenceCallback.ipynb +++ b/docs/docs/examples/observability/OpenInferenceCallback.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/OpenLLMetry.ipynb b/docs/docs/examples/observability/OpenLLMetry.ipynb index 6cc030ed64892..90946081daa60 100644 --- a/docs/docs/examples/observability/OpenLLMetry.ipynb +++ b/docs/docs/examples/observability/OpenLLMetry.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/OpikCallback.ipynb b/docs/docs/examples/observability/OpikCallback.ipynb new file mode 100644 index 0000000000000..67a2fb456aa0c --- /dev/null +++ b/docs/docs/examples/observability/OpikCallback.ipynb @@ -0,0 +1,215 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Logging traces with Opik\n", + "\n", + "For this guide we will be downloading the essays from Paul Graham and use them as our data source. We will then start querying these essays with LlamaIndex and logging the traces to Opik.\n", + "\n", + "## Creating an account on Comet.com\n", + "\n", + "[Comet](https://www.comet.com/site) provides a hosted version of the Opik platform, [simply create an account](https://www.comet.com/signup?from=llm) and grab you API Key.\n", + "\n", + "> You can also run the Opik platform locally, see the [installation guide](https://www.comet.com/docs/opik/self-host/self_hosting_opik/) for more information." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import getpass\n", + "\n", + "if \"OPIK_API_KEY\" not in os.environ:\n", + " os.environ[\"OPIK_API_KEY\"] = getpass.getpass(\"Opik API Key: \")\n", + "if \"OPIK_WORKSPACE\" not in os.environ:\n", + " os.environ[\"OPIK_WORKSPACE\"] = input(\n", + " \"Comet workspace (often the same as your username): \"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If you are running the Opik platform locally, simply set:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# import os\n", + "# os.environ[\"OPIK_URL_OVERRIDE\"] = \"http://localhost:5173/api\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Preparing our environment\n", + "\n", + "First, we will install the necessary libraries, download the Chinook database and set up our different API keys." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install opik llama-index llama-index-agent-openai llama-index-llms-openai --upgrade --quiet" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And configure the required environment variables:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import getpass\n", + "\n", + "if \"OPENAI_API_KEY\" not in os.environ:\n", + " os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\n", + " \"Enter your OpenAI API key: \"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In addition, we will download the Paul Graham essays:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import requests\n", + "\n", + "# Create directory if it doesn't exist\n", + "os.makedirs(\"./data/paul_graham/\", exist_ok=True)\n", + "\n", + "# Download the file using requests\n", + "url = \"https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\"\n", + "response = requests.get(url)\n", + "with open(\"./data/paul_graham/paul_graham_essay.txt\", \"wb\") as f:\n", + " f.write(response.content)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using LlamaIndex\n", + "\n", + "### Configuring the Opik integration\n", + "\n", + "You can use the Opik callback directly by calling:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import set_global_handler\n", + "\n", + "# You should provide your OPIK API key and Workspace using the following environment variables:\n", + "# OPIK_API_KEY, OPIK_WORKSPACE\n", + "set_global_handler(\n", + " \"opik\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that the callback handler is configured, all traces will automatically be logged to Opik.\n", + "\n", + "### Using LLamaIndex\n", + "\n", + "The first step is to load the data into LlamaIndex. We will use the `SimpleDirectoryReader` to load the data from the `data/paul_graham` directory. We will also create the vector store to index all the loaded documents." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex, SimpleDirectoryReader\n", + "\n", + "documents = SimpleDirectoryReader(\"./data/paul_graham\").load_data()\n", + "index = VectorStoreIndex.from_documents(documents)\n", + "query_engine = index.as_query_engine()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now query the index using the `query_engine` object:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = query_engine.query(\"What did the author do growing up?\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can now go to the Opik app to see the trace:\n", + "\n", + "![LlamaIndex trace in Opik](https://raw.githubusercontent.com/comet-ml/opik/main/apps/opik-documentation/documentation/static/img/cookbook/llamaIndex_cookbook.png)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "py312_llm_eval", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/examples/observability/PromptLayerHandler.ipynb b/docs/docs/examples/observability/PromptLayerHandler.ipynb index 92646aafb1e2a..bffd6b6beba05 100644 --- a/docs/docs/examples/observability/PromptLayerHandler.ipynb +++ b/docs/docs/examples/observability/PromptLayerHandler.ipynb @@ -5,7 +5,7 @@ "id": "dcab9129", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/TokenCountingHandler.ipynb b/docs/docs/examples/observability/TokenCountingHandler.ipynb index c65fc78d2ad8f..a424aca3ae634 100644 --- a/docs/docs/examples/observability/TokenCountingHandler.ipynb +++ b/docs/docs/examples/observability/TokenCountingHandler.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/UpTrainCallback.ipynb b/docs/docs/examples/observability/UpTrainCallback.ipynb index eb1bddf1f7202..aa04b60037ab2 100644 --- a/docs/docs/examples/observability/UpTrainCallback.ipynb +++ b/docs/docs/examples/observability/UpTrainCallback.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/observability/WandbCallbackHandler.ipynb b/docs/docs/examples/observability/WandbCallbackHandler.ipynb index 15e3801aafb59..cd1a5d565dac1 100644 --- a/docs/docs/examples/observability/WandbCallbackHandler.ipynb +++ b/docs/docs/examples/observability/WandbCallbackHandler.ipynb @@ -6,7 +6,7 @@ "id": "d6509c3a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/output_parsing/nvidia_output_parsing.ipynb b/docs/docs/examples/output_parsing/nvidia_output_parsing.ipynb new file mode 100644 index 0000000000000..501e5b0dbfd4c --- /dev/null +++ b/docs/docs/examples/output_parsing/nvidia_output_parsing.ipynb @@ -0,0 +1,660 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "30815a85", + "metadata": {}, + "source": [ + "# LLM Pydantic Program - NVIDIA" + ] + }, + { + "cell_type": "markdown", + "id": "311e16cb", + "metadata": {}, + "source": [ + "This 
guide shows you how to generate structured data with our `LLMTextCompletionProgram`. Given an LLM as well as an output Pydantic class, generate a structured Pydantic object.\n", + "\n", + "In terms of the target object, you can choose to directly specify `output_cls`, or specify a `PydanticOutputParser` or any other BaseOutputParser that generates a Pydantic object.\n", + "\n", + "in the examples below, we show you different ways of extracting into the `Album` object (which can contain a list of Song objects)" + ] + }, + { + "cell_type": "markdown", + "id": "e0611198", + "metadata": {}, + "source": [ + "## Extract into `Album` class\n", + "\n", + "This is a simple example of parsing an output into an `Album` schema, which can contain multiple songs.\n", + "\n", + "Just pass `Album` into the `output_cls` property on initialization of the `LLMTextCompletionProgram`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "511a8171", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install llama-index-readers-file llama-index-embeddings-nvidia llama-index-llms-nvidia" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b029b7e6", + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "# del os.environ['NVIDIA_API_KEY'] ## delete key and reset\n", + "if os.environ.get(\"NVIDIA_API_KEY\", \"\").startswith(\"nvapi-\"):\n", + " print(\"Valid NVIDIA_API_KEY already in environment. Delete to reset\")\n", + "else:\n", + " nvapi_key = getpass.getpass(\"NVAPI Key (starts with nvapi-): \")\n", + " assert nvapi_key.startswith(\n", + " \"nvapi-\"\n", + " ), f\"{nvapi_key[:5]}... is not a valid key\"\n", + " os.environ[\"NVIDIA_API_KEY\"] = nvapi_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f7a83b49-5c34-45d5-8cf4-62f348fb1299", + "metadata": {}, + "outputs": [], + "source": [ + "from pydantic import BaseModel\n", + "from typing import List\n", + "from llama_index.core import Settings\n", + "from llama_index.llms.nvidia import NVIDIA\n", + "from llama_index.embeddings.nvidia import NVIDIAEmbedding\n", + "from llama_index.core.program import LLMTextCompletionProgram\n", + "from llama_index.core.program import FunctionCallingProgram" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4e4fc4b9", + "metadata": {}, + "outputs": [], + "source": [ + "llm = NVIDIA()\n", + "\n", + "embedder = NVIDIAEmbedding(model=\"NV-Embed-QA\", truncate=\"END\")\n", + "Settings.embed_model = embedder\n", + "Settings.llm = llm" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d92e739", + "metadata": {}, + "outputs": [], + "source": [ + "class Song(BaseModel):\n", + " \"\"\"Data model for a song.\"\"\"\n", + "\n", + " title: str\n", + " length_seconds: int\n", + "\n", + "\n", + "class Album(BaseModel):\n", + " \"\"\"Data model for an album.\"\"\"\n", + "\n", + " name: str\n", + " artist: str\n", + " songs: List[Song]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46c2d509", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_template_str = \"\"\"\\\n", + "Generate an example album, with an artist and a list of songs. 
\\\n", + "Using the movie {movie_name} as inspiration.\\\n", + "\"\"\"\n", + "program = LLMTextCompletionProgram.from_defaults(\n", + " output_cls=Album,\n", + " prompt_template_str=prompt_template_str,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "498370f4", + "metadata": {}, + "source": [ + "Run program to get structured output. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ca490bf8", + "metadata": {}, + "outputs": [], + "source": [ + "output = program(movie_name=\"The Shining\")" + ] + }, + { + "cell_type": "markdown", + "id": "40cce83f", + "metadata": {}, + "source": [ + "The output is a valid Pydantic object that we can then use to call functions/APIs. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "53934d3d", + "metadata": {}, + "outputs": [], + "source": [ + "output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6401ab8d", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.output_parsers import PydanticOutputParser\n", + "\n", + "program = LLMTextCompletionProgram.from_defaults(\n", + " output_parser=PydanticOutputParser(output_cls=Album),\n", + " prompt_template_str=prompt_template_str,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1adc5b2b", + "metadata": {}, + "outputs": [], + "source": [ + "output = program(movie_name=\"Lord of the Rings\")\n", + "output" + ] + }, + { + "cell_type": "markdown", + "id": "a41391d9", + "metadata": {}, + "source": [ + "## Define a Custom Output Parser\n", + "\n", + "Sometimes you may want to parse an output your own way into a JSON object. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "60b7b669", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.output_parsers import ChainableOutputParser\n", + "\n", + "\n", + "class CustomAlbumOutputParser(ChainableOutputParser):\n", + " \"\"\"Custom Album output parser.\n", + "\n", + " Assume first line is name and artist.\n", + "\n", + " Assume each subsequent line is the song.\n", + "\n", + " \"\"\"\n", + "\n", + " def __init__(self, verbose: bool = False):\n", + " self.verbose = verbose\n", + "\n", + " def parse(self, output: str) -> Album:\n", + " \"\"\"Parse output.\"\"\"\n", + " if self.verbose:\n", + " print(f\"> Raw output: {output}\")\n", + " lines = output.split(\"\\n\")\n", + " lines = list(filter(None, (line.strip() for line in lines)))\n", + " name, artist = lines[1].split(\",\")\n", + " songs = []\n", + " for i in range(2, len(lines)):\n", + " title, length_seconds = lines[i].split(\",\")\n", + " songs.append(Song(title=title, length_seconds=length_seconds))\n", + "\n", + " return Album(name=name, artist=artist, songs=songs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e165a0f9", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_template_str = \"\"\"\\\n", + "Generate an example album, with an artist and a list of songs. 
\\\n", + "Using the movie {movie_name} as inspiration.\\\n", + "\n", + "Return answer in following format.\n", + "The first line is:\n", + ", \n", + "Every subsequent line is a song with format:\n", + ", \n", + "\n", + "\"\"\"\n", + "program = LLMTextCompletionProgram.from_defaults(\n", + " output_parser=CustomAlbumOutputParser(verbose=True),\n", + " output_cls=Album,\n", + " prompt_template_str=prompt_template_str,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7a743006", + "metadata": {}, + "outputs": [], + "source": [ + "output = program(movie_name=\"The Dark Knight\")\n", + "print(output)" + ] + }, + { + "cell_type": "markdown", + "id": "ade8b979", + "metadata": {}, + "source": [ + "# Function Calling Program for Structured Extraction\n", + "\n", + "This guide shows you how to do structured data extraction with our `FunctionCallingProgram`. Given a function-calling LLM as well as an output Pydantic class, generate a structured Pydantic object.\n", + "\n", + "in the examples below, we show you different ways of extracting into the `Album` object (which can contain a list of Song objects).\n", + "\n", + "**NOTE**: The `FunctionCallingProgram` only works with LLMs that natively support function calling, by inserting the schema of the Pydantic object as the \"tool parameters\" for a tool. For all other LLMs, please use our `LLMTextCompletionProgram`, which will directly prompt the model through text to get back a structured output." + ] + }, + { + "cell_type": "markdown", + "id": "6311f7ae", + "metadata": {}, + "source": [ + "### Without docstring in Model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd22dce2", + "metadata": {}, + "outputs": [], + "source": [ + "llm = NVIDIA(model=\"meta/llama-3.1-8b-instruct\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "42053ea8-2580-4639-9dcf-566e8427c44e", + "metadata": {}, + "outputs": [], + "source": [ + "class Song(BaseModel):\n", + " title: str\n", + " length_seconds: int\n", + "\n", + "\n", + "class Album(BaseModel):\n", + " name: str\n", + " artist: str\n", + " songs: List[Song]" + ] + }, + { + "cell_type": "markdown", + "id": "4afff44e-a746-4b9f-85a9-72058bcdd29f", + "metadata": {}, + "source": [ + "Define pydantic program" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fe756697-c299-4f9a-a108-944b6693f824", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_template_str = \"\"\"\\\n", + "Generate an example album, with an artist and a list of songs. \\\n", + "Using the movie {movie_name} as inspiration.\\\n", + "\"\"\"\n", + "\n", + "program = FunctionCallingProgram.from_defaults(\n", + " output_cls=Album,\n", + " prompt_template_str=prompt_template_str,\n", + " verbose=True,\n", + " llm=llm,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "b7be01dc-433e-4485-bab0-36a04c3afbcb", + "metadata": {}, + "source": [ + "Run program to get structured output. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25d02228-2907-4810-932e-83ec9fc71f6b", + "metadata": {}, + "outputs": [], + "source": [ + "output = program(\n", + " movie_name=\"The Shining\", description=\"Data model for an album.\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "4c2af9a5", + "metadata": {}, + "source": [ + "### With docstring in Model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35c01bec", + "metadata": {}, + "outputs": [], + "source": [ + "class Song(BaseModel):\n", + " \"\"\"Data model for a song.\"\"\"\n", + "\n", + " title: str\n", + " length_seconds: int\n", + "\n", + "\n", + "class Album(BaseModel):\n", + " \"\"\"Data model for an album.\"\"\"\n", + "\n", + " name: str\n", + " artist: str\n", + " songs: List[Song]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22268e2a", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_template_str = \"\"\"\\\n", + "Generate an example album, with an artist and a list of songs. \\\n", + "Using the movie {movie_name} as inspiration.\\\n", + "\"\"\"\n", + "program = FunctionCallingProgram.from_defaults(\n", + " output_cls=Album,\n", + " prompt_template_str=prompt_template_str,\n", + " verbose=True,\n", + " llm=llm,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "9411d0f1", + "metadata": {}, + "source": [ + "Run program to get structured output. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "066e9c2d", + "metadata": {}, + "outputs": [], + "source": [ + "output = program(movie_name=\"The Shining\")" + ] + }, + { + "cell_type": "markdown", + "id": "27ec0777-28d5-494b-b419-daf6bce2b20e", + "metadata": {}, + "source": [ + "The output is a valid Pydantic object that we can then use to call functions/APIs. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3e51bcf4-e7df-47b9-b380-8e5b900a31e1", + "metadata": {}, + "outputs": [], + "source": [ + "output" + ] + }, + { + "cell_type": "markdown", + "id": "9eaa7c0f", + "metadata": {}, + "source": [ + "# Langchain Output Parsing" + ] + }, + { + "cell_type": "markdown", + "id": "62796f28", + "metadata": {}, + "source": [ + "Download Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dca92bdc", + "metadata": {}, + "outputs": [], + "source": [ + "!mkdir -p 'data/paul_graham/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'" + ] + }, + { + "cell_type": "markdown", + "id": "92652c87", + "metadata": {}, + "source": [ + "#### Load documents, build the VectorStoreIndex" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2b4cc4c5", + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "import sys\n", + "\n", + "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n", + "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n", + "\n", + "from llama_index.core import VectorStoreIndex, SimpleDirectoryReader\n", + "from IPython.display import Markdown, display" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f102b812", + "metadata": {}, + "outputs": [], + "source": [ + "# load documents\n", + "documents = SimpleDirectoryReader(\"./data/paul_graham/\").load_data()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "704b9386", + "metadata": {}, + "outputs": [], + "source": [ + "index = VectorStoreIndex.from_documents(documents, chunk_size=512)" + ] + }, + { + "cell_type": "markdown", + "id": "8d38ce4e", + "metadata": {}, + "source": [ + "#### Define Query + Langchain Output Parser" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c8c0beb7", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.output_parsers import LangchainOutputParser\n", + "from langchain.output_parsers import StructuredOutputParser, ResponseSchema" + ] + }, + { + "cell_type": "markdown", + "id": "a2cc3956", + "metadata": {}, + "source": [ + "**Define custom QA and Refine Prompts**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d1b20fec", + "metadata": {}, + "outputs": [], + "source": [ + "response_schemas = [\n", + " ResponseSchema(\n", + " name=\"Education\",\n", + " description=(\n", + " \"Describes the author's educational experience/background.\"\n", + " ),\n", + " ),\n", + " ResponseSchema(\n", + " name=\"Work\",\n", + " description=\"Describes the author's work experience/background.\",\n", + " ),\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3c475d87", + "metadata": {}, + "outputs": [], + "source": [ + "lc_output_parser = StructuredOutputParser.from_response_schemas(\n", + " response_schemas\n", + ")\n", + "output_parser = LangchainOutputParser(lc_output_parser)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cc2b558d", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.prompts.default_prompts import (\n", + " DEFAULT_TEXT_QA_PROMPT_TMPL,\n", + ")\n", + "\n", + "# take a look at the new QA template!\n", + "fmt_qa_tmpl = output_parser.format(DEFAULT_TEXT_QA_PROMPT_TMPL)\n", + "print(fmt_qa_tmpl)" + ] + }, + { + "cell_type": "markdown", + "id": 
"e02bf2bc", + "metadata": {}, + "source": [ + "#### Query Index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ff44ad90", + "metadata": {}, + "outputs": [], + "source": [ + "query_engine = index.as_query_engine(\n", + " llm=llm,\n", + ")\n", + "response = query_engine.query(\n", + " \"What are a few things the author did growing up?\",\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "llama-index-vs8PXMh0-py3.11", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/examples/param_optimizer/param_optimizer.ipynb b/docs/docs/examples/param_optimizer/param_optimizer.ipynb index 542d48d4676a5..aa2c64e8b41ea 100644 --- a/docs/docs/examples/param_optimizer/param_optimizer.ipynb +++ b/docs/docs/examples/param_optimizer/param_optimizer.ipynb @@ -7,7 +7,7 @@ "source": [ "# [WIP] Hyperparameter Optimization for RAG\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "In this guide we show you how to do hyperparameter optimization for RAG.\n", "\n", diff --git a/docs/docs/examples/property_graph/agentic_graph_rag_vertex.ipynb b/docs/docs/examples/property_graph/agentic_graph_rag_vertex.ipynb index 4fe56f0aeb212..578e2920378ad 100644 --- a/docs/docs/examples/property_graph/agentic_graph_rag_vertex.ipynb +++ b/docs/docs/examples/property_graph/agentic_graph_rag_vertex.ipynb @@ -212,8 +212,7 @@ "from llama_index.core.vector_stores import MetadataFilters\n", "from pathlib import Path\n", "\n", - "from llama_index.core.agent import FunctionCallingAgentWorker\n", - "from llama_index.core.agent import AgentRunner" + "from llama_index.core.agent import FunctionCallingAgent" ] }, { @@ -1085,10 +1084,9 @@ "outputs": [], "source": [ "# Create Agent Runner\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent = FunctionCallingAgent.from_tools(\n", " [summary_tool, vector_query_tool], llm=vertex_gemini, verbose=True\n", - ")\n", - "agent = AgentRunner(agent_worker)" + ")" ] }, { diff --git a/docs/docs/examples/property_graph/property_graph_kuzu.ipynb b/docs/docs/examples/property_graph/property_graph_kuzu.ipynb new file mode 100644 index 0000000000000..47ecb197a8d6e --- /dev/null +++ b/docs/docs/examples/property_graph/property_graph_kuzu.ipynb @@ -0,0 +1,450 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# %pip install llama-index llama-index-embeddings-openai llama-index-graph-stores-kuzu" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[Kùzu](https://kuzudb.com/) is an open source, embedded graph database that's designed for query speed and scalability. It implements the Cypher query language, and utilizes a structured property graph model (a variant of the labelled property graph model) with support for ACID transactions. Because Kùzu is embedded, there's no requirement for a server to set up and use the database.\n", + "\n", + "If you already have an existing graph, please skip to the end of this notebook. Otherwise, let's begin by creating a graph from unstructured text to demonstrate how to use Kùzu as a graph store." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Environment Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"enter your key here\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We will be using OpenAI models for this example, so we'll specify the OpenAI API key." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-08-27 16:12:46-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 2606:50c0:8001::154, 2606:50c0:8002::154, 2606:50c0:8000::154, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|2606:50c0:8001::154|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 75042 (73K) [text/plain]\n", + "Saving to: ‘data/paul_graham/paul_graham_essay.txt’\n", + "\n", + "data/paul_graham/pa 100%[===================>] 73.28K --.-KB/s in 0.04s \n", + "\n", + "2024-08-27 16:12:47 (1.61 MB/s) - ‘data/paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir -p 'data/paul_graham/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SimpleDirectoryReader\n", + "\n", + "documents = SimpleDirectoryReader(\"./data/paul_graham/\").load_data()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Graph Construction\n", + "\n", + "We first need to create an empty Kùzu database directory by calling the `kuzu.Database` constructor. This step instantiates the database and creates the necessary directories and files within a local directory that stores the graph. This `Database` object is then passed to the `KuzuPropertyGraph` constructor." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import shutil\n", + "import kuzu\n", + "\n", + "shutil.rmtree(\"test_db\", ignore_errors=True)\n", + "db = kuzu.Database(\"test_db\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.graph_stores.kuzu import KuzuPropertyGraphStore\n", + "\n", + "graph_store = KuzuPropertyGraphStore(db)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Because Kùzu implements the structured graph property model, it imposes some level of structure on the schema of the graph. In the above case, because we did not specify a relationship schema that we want in our graph, it uses a generic schema, where the relationship types are not constrained, allowing the extracted triples from the LLM to be stored as relationships in the graph." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define models\n", + "\n", + "Below, we'll define the models used for embedding the text and the LLMs that are used to extract triples from the text and generate the response.\n", + "In this case, we specify different temperature settings for the same model - the extraction model has a temperature of 0." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core import Settings\n", + "\n", + "embed_model = OpenAIEmbedding(model_name=\"text-embedding-3-small\")\n", + "extract_llm = OpenAI(model=\"gpt-4o-mini\", temperature=0.0)\n", + "generate_llm = OpenAI(model=\"gpt-4o-mini\", temperature=0.3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Create property graph index without imposing structure\n", + "\n", + "Because we didn't specify the relationship schema above, we can simply invoke the `SchemaLLMPathExtractor` to extract the triples from the text and store them in the graph. We can define the property graph index using Kùzu as the graph store, as shown below:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Parsing nodes: 100%|██████████| 1/1 [00:00<00:00, 17.81it/s]\n", + "Extracting paths from text with schema: 100%|██████████| 22/22 [00:31<00:00, 1.43s/it]\n", + "Generating embeddings: 100%|██████████| 1/1 [00:00<00:00, 1.34it/s]\n", + "Generating embeddings: 100%|██████████| 2/2 [00:00<00:00, 3.06it/s]\n" + ] + } + ], + "source": [ + "from llama_index.core import PropertyGraphIndex\n", + "from llama_index.core.indices.property_graph import SchemaLLMPathExtractor\n", + "\n", + "index = PropertyGraphIndex.from_documents(\n", + " documents,\n", + " embed_model=embed_model,\n", + " kg_extractors=[SchemaLLMPathExtractor(extract_llm)],\n", + " property_graph_store=graph_store,\n", + " show_progress=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that the graph is created, we can explore it in [Kùzu Explorer](https://docs.kuzudb.com/visualization/), a web-base UI, by running a Docker container that pulls the latest image of Kùzu Explorer as follows:\n", + "```bash\n", + "docker run -p 8000:8000 \\\n", + " -v ./test_db:/database \\\n", + " --rm kuzudb/explorer:latest\n", + "```\n", + "\n", + "Then, launch the UI and then visting [http://localhost:8000/](http://localhost:8000/). \n", + "\n", + "The easiest way to see the entire graph is to use a Cypher query like `\"match (a)-[b]->(c) return * limit 200\"`.\n", + "\n", + "To delete the entire graph, you can either delete the `./test_db` directory that contains the database files, or run the Cypher query `\"match (n) detach delete n\"` in the Kùzu Explorer shell." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying and Retrieval" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Interleaf and Viaweb are both products associated with the development of software solutions. Interleaf is linked to Lisp, indicating a relationship where Interleaf may utilize or be built upon Lisp programming language capabilities. 
Viaweb, on the other hand, is identified as an ecommerce software product and also has a connection to Lisp, suggesting that it may incorporate Lisp in its architecture or functionality. Both products are documented in a text file, which includes details about their creation and modification dates, file size, and type.\n" + ] + } + ], + "source": [ + "# Switch to the generate LLM during retrieval\n", + "Settings.llm = generate_llm\n", + "\n", + "query_engine = index.as_query_engine(include_text=False)\n", + "\n", + "response = query_engine.query(\"Tell me more about Interleaf and Viaweb\")\n", + "\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Create property graph index with structure\n", + "\n", + "The recommended way to use Kùzu is to apply a structured schema to the graph. The schema is defined by specifying the relationship types (including direction) that we want in the graph. The imposition of structure helps with generating triples that are more meaningful for the types of questions we may want to answer from the graph.\n", + "\n", + "By specifying the below validation schema, we can enforce that the graph only contains relationships of the specified types." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Literal\n", + "\n", + "entities = Literal[\"PERSON\", \"PLACE\", \"ORGANIZATION\"]\n", + "relations = Literal[\"HAS\", \"PART_OF\", \"WORKED_ON\", \"WORKED_WITH\", \"WORKED_AT\"]\n", + "# Define the relationship schema that we will pass to our graph store\n", + "# This must be a list of valid triples in the form (head_entity, relation, tail_entity)\n", + "validation_schema = [\n", + " (\"ORGANIZATION\", \"HAS\", \"PERSON\"),\n", + " (\"PERSON\", \"WORKED_AT\", \"ORGANIZATION\"),\n", + " (\"PERSON\", \"WORKED_WITH\", \"PERSON\"),\n", + " (\"PERSON\", \"WORKED_ON\", \"ORGANIZATION\"),\n", + " (\"PERSON\", \"PART_OF\", \"ORGANIZATION\"),\n", + " (\"ORGANIZATION\", \"PART_OF\", \"ORGANIZATION\"),\n", + " (\"PERSON\", \"WORKED_AT\", \"PLACE\"),\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create a new empty database\n", + "shutil.rmtree(\"test_db\", ignore_errors=True)\n", + "db = kuzu.Database(\"test_db\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Along with the `Database` constructor, we also specify two additional arguments to the property graph store: `has_structured_schema=True` and `relationship_schema=validation_schema`, which provides Kùzu additional information as it instantiates a new graph." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "graph_store = KuzuPropertyGraphStore(\n", + " db,\n", + " has_structured_schema=True,\n", + " relationship_schema=validation_schema,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To construct a property graph with the desired schema, observe that we specify a few additional arguments to the `SchemaLLMPathExtractor`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Parsing nodes: 100%|██████████| 1/1 [00:00<00:00, 16.23it/s]\n", + "Extracting paths from text with schema: 100%|██████████| 22/22 [00:29<00:00, 1.34s/it]\n", + "Generating embeddings: 100%|██████████| 1/1 [00:00<00:00, 1.17it/s]\n", + "Generating embeddings: 100%|██████████| 4/4 [00:01<00:00, 3.69it/s]\n" + ] + } + ], + "source": [ + "index = PropertyGraphIndex.from_documents(\n", + " documents,\n", + " embed_model=OpenAIEmbedding(model_name=\"text-embedding-3-small\"),\n", + " kg_extractors=[\n", + " SchemaLLMPathExtractor(\n", + " llm=OpenAI(model=\"gpt-4o-mini\", temperature=0.0),\n", + " possible_entities=entities,\n", + " possible_relations=relations,\n", + " kg_validation_schema=validation_schema,\n", + " strict=True, # if false, will allow triples outside of the schema\n", + " )\n", + " ],\n", + " property_graph_store=graph_store,\n", + " show_progress=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now apply the query engine on the index as before." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Interleaf and Viaweb are both organizations mentioned in the provided information. Interleaf is associated with Emacs, indicating a connection to text editing or software development environments. Viaweb, on the other hand, has several associations, including individuals like Julian and Idelle, as well as the programming language Lisp. This suggests that Viaweb may have a broader scope, potentially involving web development or e-commerce, given its historical context as an early web application platform. Both organizations appear to have been referenced in a document related to Paul Graham, indicating their relevance in discussions around technology or entrepreneurship.\n" + ] + } + ], + "source": [ + "# Switch to the generate LLM during retrieval\n", + "Settings.llm = generate_llm\n", + "\n", + "query_engine = index.as_query_engine(include_text=False)\n", + "\n", + "response2 = query_engine.query(\"Tell me more about Interleaf and Viaweb\")\n", + "print(str(response2))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Use existing graph\n", + "\n", + "You can reuse an existing `Database` object to connect to its underlying `PropertyGraphIndex`. This is useful when you want to query the graph without having to re-extract the triples from the text." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Viaweb was founded by Paul Graham. 
The specific founding date is not provided in the information available.\n" + ] + } + ], + "source": [ + "graph_store = KuzuPropertyGraphStore(db)\n", + "\n", + "# Set up the property graph index\n", + "index = PropertyGraphIndex.from_existing(\n", + " embed_model=embed_model,\n", + " llm=generate_llm,\n", + " property_graph_store=graph_store,\n", + ")\n", + "\n", + "query_engine = index.as_query_engine(include_text=False)\n", + "\n", + "response3 = query_engine.query(\"When was Viaweb founded, and by whom?\")\n", + "print(str(response3))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For full details on construction, retrieval, and querying of a property graph, see the [full docs page](../../module_guides/indexing/lpg_index_guide.md)." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "llama", + "language": "python", + "name": "llama" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/examples/property_graph/property_graph_neptune.ipynb b/docs/docs/examples/property_graph/property_graph_neptune.ipynb index 547c39490adbc..0a7122e1a8bad 100644 --- a/docs/docs/examples/property_graph/property_graph_neptune.ipynb +++ b/docs/docs/examples/property_graph/property_graph_neptune.ipynb @@ -273,7 +273,7 @@ "metadata": {}, "outputs": [], "source": [ - "from pydantic.v1 import BaseModel, \n", + "from pydantic import BaseModel, Field\n", "from llama_index.core.indices.property_graph import CypherTemplateRetriever\n", "\n", "cypher_query = \"\"\"\n", @@ -282,12 +282,14 @@ " RETURN c.text, o.name, o.label;\n", " \"\"\"\n", "\n", + "\n", "class TemplateParams(BaseModel):\n", " \"\"\"Template params for a cypher query.\"\"\"\n", "\n", " names: list[str] = Field(\n", - " description=\"A list of entity names or keywords to use for lookup in a knowledge graph.\"\n", - " )\n", + " description=\"A list of entity names or keywords to use for lookup in a knowledge graph.\"\n", + " )\n", + "\n", "\n", "cypher_retriever = CypherTemplateRetriever(\n", " index.property_graph_store, TemplateParams, cypher_query\n", diff --git a/docs/docs/examples/query_engine/CustomRetrievers.ipynb b/docs/docs/examples/query_engine/CustomRetrievers.ipynb index 507df4a1651cd..a4d7c1da67086 100644 --- a/docs/docs/examples/query_engine/CustomRetrievers.ipynb +++ b/docs/docs/examples/query_engine/CustomRetrievers.ipynb @@ -5,7 +5,7 @@ "id": "7f6607dd", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/JSONalyze_query_engine.ipynb b/docs/docs/examples/query_engine/JSONalyze_query_engine.ipynb index 217066c181b39..62809ccda16b5 100644 --- a/docs/docs/examples/query_engine/JSONalyze_query_engine.ipynb +++ b/docs/docs/examples/query_engine/JSONalyze_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open\n", + "\"Open\n", "\n", "# JSONalyze Query Engine" ] }, { diff --git a/docs/docs/examples/query_engine/JointQASummary.ipynb b/docs/docs/examples/query_engine/JointQASummary.ipynb index 9323280878c5e..abf133da7499d 100644 --- a/docs/docs/examples/query_engine/JointQASummary.ipynb +++ b/docs/docs/examples/query_engine/JointQASummary.ipynb @@ -5,7 +5,7 @@ "id": "5f1c83e5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git 
a/docs/docs/examples/query_engine/SQLAutoVectorQueryEngine.ipynb b/docs/docs/examples/query_engine/SQLAutoVectorQueryEngine.ipynb index 5a5e023828979..a98e6db414711 100644 --- a/docs/docs/examples/query_engine/SQLAutoVectorQueryEngine.ipynb +++ b/docs/docs/examples/query_engine/SQLAutoVectorQueryEngine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/SQLJoinQueryEngine.ipynb b/docs/docs/examples/query_engine/SQLJoinQueryEngine.ipynb index 1b2bc22e29de4..60079406078b0 100644 --- a/docs/docs/examples/query_engine/SQLJoinQueryEngine.ipynb +++ b/docs/docs/examples/query_engine/SQLJoinQueryEngine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/SQLRouterQueryEngine.ipynb b/docs/docs/examples/query_engine/SQLRouterQueryEngine.ipynb index f533822dd3295..b1af9610cf423 100644 --- a/docs/docs/examples/query_engine/SQLRouterQueryEngine.ipynb +++ b/docs/docs/examples/query_engine/SQLRouterQueryEngine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/citation_query_engine.ipynb b/docs/docs/examples/query_engine/citation_query_engine.ipynb index 2e03423a87ec9..5f65806d58ce4 100644 --- a/docs/docs/examples/query_engine/citation_query_engine.ipynb +++ b/docs/docs/examples/query_engine/citation_query_engine.ipynb @@ -5,7 +5,7 @@ "id": "f560d34d", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/ensemble_query_engine.ipynb b/docs/docs/examples/query_engine/ensemble_query_engine.ipynb index ebbdccd6db885..890b606d28039 100644 --- a/docs/docs/examples/query_engine/ensemble_query_engine.ipynb +++ b/docs/docs/examples/query_engine/ensemble_query_engine.ipynb @@ -5,7 +5,7 @@ "id": "9bbed780", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/flare_query_engine.ipynb b/docs/docs/examples/query_engine/flare_query_engine.ipynb index 73290b04d0266..54f1d4c5272f7 100644 --- a/docs/docs/examples/query_engine/flare_query_engine.ipynb +++ b/docs/docs/examples/query_engine/flare_query_engine.ipynb @@ -5,7 +5,7 @@ "id": "5e329054", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/json_query_engine.ipynb b/docs/docs/examples/query_engine/json_query_engine.ipynb index 5471e8688f8fc..5c9f4fdee9727 100644 --- a/docs/docs/examples/query_engine/json_query_engine.ipynb +++ b/docs/docs/examples/query_engine/json_query_engine.ipynb @@ -5,7 +5,7 @@ "id": "fa753135", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/knowledge_graph_query_engine.ipynb b/docs/docs/examples/query_engine/knowledge_graph_query_engine.ipynb index ce04d70d9b504..915643f7babdd 100644 --- a/docs/docs/examples/query_engine/knowledge_graph_query_engine.ipynb +++ b/docs/docs/examples/query_engine/knowledge_graph_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/knowledge_graph_rag_query_engine.ipynb b/docs/docs/examples/query_engine/knowledge_graph_rag_query_engine.ipynb index 4d82a472fc03b..61c10608c096b 100644 --- a/docs/docs/examples/query_engine/knowledge_graph_rag_query_engine.ipynb +++ 
b/docs/docs/examples/query_engine/knowledge_graph_rag_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/pandas_query_engine.ipynb b/docs/docs/examples/query_engine/pandas_query_engine.ipynb index 3343353f2b7f7..63e1d832e170d 100644 --- a/docs/docs/examples/query_engine/pandas_query_engine.ipynb +++ b/docs/docs/examples/query_engine/pandas_query_engine.ipynb @@ -5,7 +5,7 @@ "id": "8329aae0", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_engine/sub_question_query_engine.ipynb b/docs/docs/examples/query_engine/sub_question_query_engine.ipynb index c496bd350ba04..40eac6b8cc637 100644 --- a/docs/docs/examples/query_engine/sub_question_query_engine.ipynb +++ b/docs/docs/examples/query_engine/sub_question_query_engine.ipynb @@ -5,7 +5,7 @@ "id": "0e3e4245", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_transformations/HyDEQueryTransformDemo.ipynb b/docs/docs/examples/query_transformations/HyDEQueryTransformDemo.ipynb index ac6b2633128e4..0cf642d327f0b 100644 --- a/docs/docs/examples/query_transformations/HyDEQueryTransformDemo.ipynb +++ b/docs/docs/examples/query_transformations/HyDEQueryTransformDemo.ipynb @@ -5,7 +5,7 @@ "id": "033d7aed", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_transformations/SimpleIndexDemo-multistep.ipynb b/docs/docs/examples/query_transformations/SimpleIndexDemo-multistep.ipynb index 6ac46ee828be1..6ef079b814277 100644 --- a/docs/docs/examples/query_transformations/SimpleIndexDemo-multistep.ipynb +++ b/docs/docs/examples/query_transformations/SimpleIndexDemo-multistep.ipynb @@ -5,7 +5,7 @@ "id": "5e0768d5", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/query_transformations/query_transform_cookbook.ipynb b/docs/docs/examples/query_transformations/query_transform_cookbook.ipynb index 79a9cd4c01c18..1270db6f92dae 100644 --- a/docs/docs/examples/query_transformations/query_transform_cookbook.ipynb +++ b/docs/docs/examples/query_transformations/query_transform_cookbook.ipynb @@ -7,7 +7,7 @@ "source": [ "# Query Transform Cookbook \n", "\n", - "\"Open\n", + "\"Open\n", "\n", "A user query can be transformed and decomposed in many ways before being executed as part of a RAG query engine, agent, or any other pipeline.\n", "\n", diff --git a/docs/docs/examples/response_synthesizers/long_context_test.ipynb b/docs/docs/examples/response_synthesizers/long_context_test.ipynb index be351f1befdba..ca836e0c82a56 100644 --- a/docs/docs/examples/response_synthesizers/long_context_test.ipynb +++ b/docs/docs/examples/response_synthesizers/long_context_test.ipynb @@ -7,7 +7,7 @@ "source": [ "# Stress-Testing Long Context LLMs with a Recall Task\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "In this section we stress-test long context recall capabilities of GPT-4 and Claude v2. This is inspired by [Greg Kamradt's tweet](https://x.com/GregKamradt/status/1722386725635580292?s=20). 
\n", "\n", diff --git a/docs/docs/examples/response_synthesizers/refine.ipynb b/docs/docs/examples/response_synthesizers/refine.ipynb index 0941662a9fd87..acd847322f6af 100644 --- a/docs/docs/examples/response_synthesizers/refine.ipynb +++ b/docs/docs/examples/response_synthesizers/refine.ipynb @@ -5,7 +5,7 @@ "id": "91dea8fd", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/response_synthesizers/structured_refine.ipynb b/docs/docs/examples/response_synthesizers/structured_refine.ipynb index 825b0df0a0f6d..f06f43ba7e116 100644 --- a/docs/docs/examples/response_synthesizers/structured_refine.ipynb +++ b/docs/docs/examples/response_synthesizers/structured_refine.ipynb @@ -5,7 +5,7 @@ "id": "def266be", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/response_synthesizers/tree_summarize.ipynb b/docs/docs/examples/response_synthesizers/tree_summarize.ipynb index 493f83a30c2c6..554ecedf381e0 100644 --- a/docs/docs/examples/response_synthesizers/tree_summarize.ipynb +++ b/docs/docs/examples/response_synthesizers/tree_summarize.ipynb @@ -5,7 +5,7 @@ "id": "dad51427", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/retrievers/auto_merging_retriever.ipynb b/docs/docs/examples/retrievers/auto_merging_retriever.ipynb index f1f8d5f387f0c..17dff92bd3c17 100644 --- a/docs/docs/examples/retrievers/auto_merging_retriever.ipynb +++ b/docs/docs/examples/retrievers/auto_merging_retriever.ipynb @@ -5,7 +5,7 @@ "id": "b39acfe0", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/retrievers/bm25_retriever.ipynb b/docs/docs/examples/retrievers/bm25_retriever.ipynb index a4a4ab2e1a5d9..27443b4ec00fc 100644 --- a/docs/docs/examples/retrievers/bm25_retriever.ipynb +++ b/docs/docs/examples/retrievers/bm25_retriever.ipynb @@ -5,7 +5,7 @@ "id": "723fcf40", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/retrievers/multi_doc_together_hybrid.ipynb b/docs/docs/examples/retrievers/multi_doc_together_hybrid.ipynb index 7a7e976423b3b..9e4fe03904b82 100644 --- a/docs/docs/examples/retrievers/multi_doc_together_hybrid.ipynb +++ b/docs/docs/examples/retrievers/multi_doc_together_hybrid.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/retrievers/recursive_retriever_nodes.ipynb b/docs/docs/examples/retrievers/recursive_retriever_nodes.ipynb index 8e0b8da30911d..8febccad0e726 100644 --- a/docs/docs/examples/retrievers/recursive_retriever_nodes.ipynb +++ b/docs/docs/examples/retrievers/recursive_retriever_nodes.ipynb @@ -5,7 +5,7 @@ "id": "94f8a023", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/retrievers/vectara_auto_retriever.ipynb b/docs/docs/examples/retrievers/vectara_auto_retriever.ipynb index 3cc4b043ab243..4e3484b999230 100644 --- a/docs/docs/examples/retrievers/vectara_auto_retriever.ipynb +++ b/docs/docs/examples/retrievers/vectara_auto_retriever.ipynb @@ -5,7 +5,7 @@ "id": "0e81b124", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/selectors/not_diamond_selector.ipynb b/docs/docs/examples/selectors/not_diamond_selector.ipynb new file mode 100644 index 0000000000000..d3b3ddddec3e3 --- /dev/null +++ b/docs/docs/examples/selectors/not_diamond_selector.ipynb @@ -0,0 +1,343 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": 
{}, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using Not Diamond to Select LLMs For Indexes\n", + "In this tutorial, we demonstrate how to use a router query engine with a selector powered by [Not Diamond](https://www.notdiamond.ai). You can automatically route a query to one of several available LLMs, which will then select the best index for your needs." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython -m pip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install -q llama-index-llms-anthropic llama-index-llms-openai" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython3.11 -m pip install --upgrade pip\u001b[0m\n" + ] + } + ], + "source": [ + "!pip install -q llama-index notdiamond" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# NOTE: This is ONLY necessary in jupyter notebook.\n", + "# Details: Jupyter runs an event-loop behind the scenes.\n", + "# This results in nested event-loops when we start an event-loop to make async queries.\n", + "# This is normally not allowed, we use nest_asyncio to allow it for convenience.\n", + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!mkdir -p 'data/paul_graham/'\n", + "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Routing Queries With Not Diamond" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from typing import List\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"\n", + "os.environ[\"ANTHROPIC_API_KEY\"] = \"sk-ant-...\"\n", + "os.environ[\"NOTDIAMOND_API_KEY\"] = \"sk-...\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Indexes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import (\n", + " SimpleDirectoryReader,\n", + " VectorStoreIndex,\n", + " SummaryIndex,\n", + " Settings,\n", + ")\n", + "from llama_index.core.query_engine import RouterQueryEngine\n", + "from 
llama_index.core.tools import QueryEngineTool\n", + "from llama_index.selectors.notdiamond.base import NotDiamondSelector\n", + "\n", + "# load documents\n", + "documents = SimpleDirectoryReader(\"data/paul_graham\").load_data()\n", + "nodes = Settings.node_parser.get_nodes_from_documents(documents)\n", + "\n", + "# index documents\n", + "vector_index = VectorStoreIndex.from_documents(documents)\n", + "summary_index = SummaryIndex.from_documents(documents)\n", + "query_text = \"What was Paul Graham's role at Yahoo?\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set up Tools for the QueryEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "list_query_engine = summary_index.as_query_engine(\n", + " response_mode=\"tree_summarize\",\n", + " use_async=True,\n", + ")\n", + "vector_query_engine = vector_index.as_query_engine()\n", + "\n", + "list_tool = QueryEngineTool.from_defaults(\n", + " query_engine=list_query_engine,\n", + " description=(\n", + " \"Useful for summarization questions related to the Paul Graham essay on\"\n", + " \" What I Worked On.\"\n", + " ),\n", + ")\n", + "\n", + "vector_tool = QueryEngineTool.from_defaults(\n", + " query_engine=vector_query_engine,\n", + " description=(\n", + " \"Useful for retrieving specific context from the Paul Graham essay on What\"\n", + " \" I Worked On.\"\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create a NotDiamondSelector and RouterQueryEngine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from notdiamond import NotDiamond\n", + "\n", + "client = NotDiamond(\n", + " api_key=os.environ[\"NOTDIAMOND_API_KEY\"],\n", + " llm_configs=[\"openai/gpt-4o\", \"anthropic/claude-3-5-sonnet-20240620\"],\n", + ")\n", + "preference_id = client.create_preference_id()\n", + "client.preference_id = preference_id\n", + "\n", + "nd_selector = NotDiamondSelector(client=client)\n", + "\n", + "query_engine = RouterQueryEngine(\n", + " selector=nd_selector,\n", + " query_engine_tools=[\n", + " list_tool,\n", + " vector_tool,\n", + " ],\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Use Not Diamond to Query Indexes\n", + "\n", + "Once we've set up our indexes and query engine, we can submit queries as usual." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham has contributed to the field of technology and entrepreneurship through his essays and datasets. He has provided a labelled RAG dataset based on one of his essays, which includes queries, reference answers, and reference contexts. 
Additionally, he has shared insights and code related to using the LlamaIndex RAG pipeline for evaluation purposes.\n" + ] + } + ], + "source": [ + "response = query_engine.query(\n", + " \"Please summarize Paul Graham's working experience.\"\n", + ")\n", + "print(str(response))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Paul Graham founded Viaweb after RICS.\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What did Paul Graham do after RICS?\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using NotDiamondSelector as a standalone selector\n", + "\n", + "As with LlamaIndex's built-in selectors, you can also use the `NotDiamondSelector` to select an index." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "selections=[SingleSelection(index=1, reason=\"The question asks for a summary of a recipe for deviled eggs, which directly aligns with option 2: 'Great for summarizing recipes.' This choice is most relevant as it specifically addresses the task of summarizing recipes, which is exactly what the question is asking for.\")] session_id='078a837f-d8da-4de4-aec9-b44df5ea32ba' llm=LLMConfig(anthropic/claude-3-5-sonnet-20240620)\n" + ] + } + ], + "source": [ + "from llama_index.core.tools import ToolMetadata\n", + "from llama_index.selectors.notdiamond.base import NotDiamondSelector\n", + "\n", + "from notdiamond import NotDiamond, Metric\n", + "\n", + "choices = [\n", + " ToolMetadata(\n", + " name=\"vector_index\",\n", + " description=\"Great for asking questions about recipes.\",\n", + " ),\n", + " ToolMetadata(\n", + " name=\"list_index\", description=\"Great for summarizing recipes.\"\n", + " ),\n", + "]\n", + "\n", + "llm_configs = [\"openai/gpt-4o\", \"anthropic/claude-3-5-sonnet-20240620\"]\n", + "nd_client = NotDiamond(\n", + " api_key=os.environ[\"NOTDIAMOND_API_KEY\"],\n", + " llm_configs=llm_configs,\n", + " preference_id=preference_id,\n", + ")\n", + "preference_id = nd_client.create_preference_id()\n", + "nd_client.preference_id = preference_id\n", + "nd_selector = NotDiamondSelector(client=nd_client)\n", + "\n", + "nd_result = nd_selector.select(\n", + " choices, query=\"What is the summary of this recipe for deviled eggs?\"\n", + ")\n", + "print(nd_result)\n", + "\n", + "# Use the result's session_id to customize your routing logic.\n", + "metric = Metric(\"accuracy\")\n", + "score = metric.feedback(\n", + " session_id=nd_result.session_id,\n", + " llm_config=nd_result.llm,\n", + " value=1,\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/examples/structured_outputs/structured_outputs.ipynb b/docs/docs/examples/structured_outputs/structured_outputs.ipynb index 13ac310276d74..b248c1a5c6e70 100644 --- a/docs/docs/examples/structured_outputs/structured_outputs.ipynb +++ b/docs/docs/examples/structured_outputs/structured_outputs.ipynb @@ -67,7 +67,7 @@ "outputs": [], "source": [ "from 
typing import List\n", - "from pydantic.v1 import BaseModel, Field\n", + "from pydantic import BaseModel, Field\n", "\n", "\n", "class Song(BaseModel):\n", @@ -565,7 +565,7 @@ "metadata": {}, "outputs": [], "source": [ - "from pydantic.v1 import BaseModel, Field\n", + "from pydantic import BaseModel, Field\n", "from typing import List\n", "\n", "\n", diff --git a/docs/docs/examples/tools/OnDemandLoaderTool.ipynb b/docs/docs/examples/tools/OnDemandLoaderTool.ipynb index a26cd92705457..12b6df18cdc90 100644 --- a/docs/docs/examples/tools/OnDemandLoaderTool.ipynb +++ b/docs/docs/examples/tools/OnDemandLoaderTool.ipynb @@ -5,7 +5,7 @@ "id": "4f4b3e2a", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/AnalyticDBDemo.ipynb b/docs/docs/examples/vector_stores/AnalyticDBDemo.ipynb index c866c237f393d..1e77d9f8ebeb6 100644 --- a/docs/docs/examples/vector_stores/AnalyticDBDemo.ipynb +++ b/docs/docs/examples/vector_stores/AnalyticDBDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/AzureAISearchIndexDemo.ipynb b/docs/docs/examples/vector_stores/AzureAISearchIndexDemo.ipynb index d7b2cb5a5fdc1..97ea0fa21c774 100644 --- a/docs/docs/examples/vector_stores/AzureAISearchIndexDemo.ipynb +++ b/docs/docs/examples/vector_stores/AzureAISearchIndexDemo.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/AzureCosmosDBNoSqlDemo.ipynb b/docs/docs/examples/vector_stores/AzureCosmosDBNoSqlDemo.ipynb new file mode 100644 index 0000000000000..33c5e2091ea02 --- /dev/null +++ b/docs/docs/examples/vector_stores/AzureCosmosDBNoSqlDemo.ipynb @@ -0,0 +1,246 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "6463dfe0-31f0-494e-995e-9d3b96db0eeb", + "metadata": {}, + "source": [ + "# Azure Cosmos DB No SQL Vector Store\n", + "\n", + "In this notebook we are going to show a quick demo of how to use AzureCosmosDBNoSqlVectorSearch to perform vector searches in LlamaIndex.\n", + "\n", + "If you're opening this Notebook on colab, you will probably need to install LlamaIndex 🦙." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d865e38e-7cfb-44fc-a811-ccbbb6bd5c8e", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install llama-index-embeddings-openai\n", + "%pip install llama-index-llms-azure-openai" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ed73758-4a14-4c9e-a4de-7c9c584fbdc0", + "metadata": {}, + "outputs": [], + "source": [ + "!pip install llama-index" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0609a213-479b-4924-8a31-07f9076bcb4a", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import openai\n", + "from llama_index.llms.azure_openai import AzureOpenAI\n", + "from llama_index.embeddings.openai import OpenAIEmbedding\n", + "from llama_index.core import VectorStoreIndex, SimpleDirectoryReader\n", + "from llama_index.embeddings.azure_openai import AzureOpenAIEmbedding" + ] + }, + { + "cell_type": "markdown", + "id": "d1cf060d-7ab1-4a56-8098-4fb306d3401e", + "metadata": {}, + "source": [ + "# Setup Azure OpenAI\n", + "\n", + "The first step is to configure the LLM and the embedding model. These models will be used to create embeddings for the documents loaded into the database and for LLM completions."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "230c386e-b118-4cef-aabe-37f78e478f97", + "metadata": {}, + "outputs": [], + "source": [ + "llm = AzureOpenAI(\n", + " model=\"AZURE_OPENAI_MODEL\",\n", + " deployment_name=\"AZURE_OPENAI_DEPLOYMENT_NAME\",\n", + " azure_endpoint=\"AZURE_OPENAI_BASE\",\n", + " api_key=\"AZURE_OPENAI_KEY\",\n", + " api_version=\"AZURE_OPENAI_VERSION\",\n", + ")\n", + "\n", + "embed_model = AzureOpenAIEmbedding(\n", + " model=\"AZURE_OPENAI_EMBEDDING_MODEL\",\n", + " deployment_name=\"AZURE_OPENAI_EMBEDDING_MODEL_DEPLOYMENT_NAME\",\n", + " azure_endpoint=\"AZURE_OPENAI_BASE\",\n", + " api_key=\"AZURE_OPENAI_KEY\",\n", + " api_version=\"AZURE_OPENAI_VERSION\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8da35d45-9689-4f3a-9011-1cda0fb361ea", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import Settings\n", + "\n", + "Settings.llm = llm\n", + "Settings.embed_model = embed_model" + ] + }, + { + "cell_type": "markdown", + "id": "084aa964-7222-47b2-bdab-825c85a6ffed", + "metadata": {}, + "source": [ + "# Loading Documents\n", + "\n", + "In this example, we will be using the Paul Graham essay, which will be processed by the SimpleDirectoryReader." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f689978-93c6-4c34-9a6e-9fca606a1058", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SimpleDirectoryReader\n", + "\n", + "documents = SimpleDirectoryReader(\n", + " input_files=[r\"\\docs\\examples\\data\\paul_graham\\paul_graham_essay.txt\"]\n", + ").load_data()\n", + "\n", + "print(\"Document ID:\", documents[0].doc_id)" + ] + }, + { + "cell_type": "markdown", + "id": "e6c5f4bf-411e-482d-8ada-580dad6575ee", + "metadata": {}, + "source": [ + "# Create the index\n", + "\n", + "Here we establish the connection to Azure Cosmos DB NoSQL and create a vector store index."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4eb1251c-8bbb-416d-9c32-c7260d039900", + "metadata": {}, + "outputs": [], + "source": [ + "from azure.cosmos import CosmosClient, PartitionKey\n", + "from llama_index.vector_stores.azurecosmosnosql import (\n", + " AzureCosmosDBNoSqlVectorSearch,\n", + ")\n", + "from llama_index.core import StorageContext\n", + "\n", + "# create cosmos client\n", + "URI = \"AZURE_COSMOSDB_URI\"\n", + "KEY = \"AZURE_COSMOSDB_KEY\"\n", + "client = CosmosClient(URI, credential=KEY)\n", + "\n", + "# specify vector store properties\n", + "indexing_policy = {\n", + " \"indexingMode\": \"consistent\",\n", + " \"includedPaths\": [{\"path\": \"/*\"}],\n", + " \"excludedPaths\": [{\"path\": '/\"_etag\"/?'}],\n", + " \"vectorIndexes\": [{\"path\": \"/embedding\", \"type\": \"quantizedFlat\"}],\n", + "}\n", + "\n", + "vector_embedding_policy = {\n", + " \"vectorEmbeddings\": [\n", + " {\n", + " \"path\": \"/embedding\",\n", + " \"dataType\": \"float32\",\n", + " \"distanceFunction\": \"cosine\",\n", + " \"dimensions\": 3072,\n", + " }\n", + " ]\n", + "}\n", + "\n", + "partition_key = PartitionKey(path=\"/id\")\n", + "cosmos_container_properties_test = {\"partition_key\": partition_key}\n", + "cosmos_database_properties_test = {}\n", + "\n", + "# create vector store\n", + "store = AzureCosmosDBNoSqlVectorSearch(\n", + " cosmos_client=client,\n", + " vector_embedding_policy=vector_embedding_policy,\n", + " indexing_policy=indexing_policy,\n", + " cosmos_container_properties=cosmos_container_properties_test,\n", + " cosmos_database_properties=cosmos_database_properties_test,\n", + " create_container=True,\n", + ")\n", + "\n", + "storage_context = StorageContext.from_defaults(vector_store=store)\n", + "\n", + "index = VectorStoreIndex.from_documents(\n", + " documents, storage_context=storage_context\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "70671760-c408-4f94-b4c8-f9b7aad47644", + "metadata": {}, + "source": [ + "# Query the index\n", + "We can now ask questions using our index." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "930a6143-62c9-4377-8955-0c05bfb7e1a2", + "metadata": {}, + "outputs": [], + "source": [ + "query_engine = index.as_query_engine()\n", + "response = query_engine.query(\"What did the author love working on?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c572a6cd-34db-47e1-897c-a03048173882", + "metadata": {}, + "outputs": [], + "source": [ + "import textwrap\n", + "\n", + "print(textwrap.fill(str(response), 100))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/examples/vector_stores/FirestoreVectorStore.ipynb b/docs/docs/examples/vector_stores/FirestoreVectorStore.ipynb index e1a15daf4114f..0aa5e3ae5fd60 100644 --- a/docs/docs/examples/vector_stores/FirestoreVectorStore.ipynb +++ b/docs/docs/examples/vector_stores/FirestoreVectorStore.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/HologresDemo.ipynb b/docs/docs/examples/vector_stores/HologresDemo.ipynb index dd6667d8cdabd..97cb079ef9984 100644 --- a/docs/docs/examples/vector_stores/HologresDemo.ipynb +++ b/docs/docs/examples/vector_stores/HologresDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/LanternIndexDemo.ipynb b/docs/docs/examples/vector_stores/LanternIndexDemo.ipynb index e602b5f529c1c..37d9f024e038b 100644 --- a/docs/docs/examples/vector_stores/LanternIndexDemo.ipynb +++ b/docs/docs/examples/vector_stores/LanternIndexDemo.ipynb @@ -6,7 +6,7 @@ "id": "bccd47fc", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/LindormDemo.ipynb b/docs/docs/examples/vector_stores/LindormDemo.ipynb index 6e7cf94382ecc..7f45f409e38d6 100644 --- a/docs/docs/examples/vector_stores/LindormDemo.ipynb +++ b/docs/docs/examples/vector_stores/LindormDemo.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/docs/docs/examples/vector_stores/TablestoreDemo.ipynb b/docs/docs/examples/vector_stores/TablestoreDemo.ipynb new file mode 100644 index 0000000000000..b92c4b6d92c20 --- /dev/null +++ b/docs/docs/examples/vector_stores/TablestoreDemo.ipynb @@ -0,0 +1,301 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# TablestoreVectorStore\n", + "\n", + "> [Tablestore](https://www.aliyun.com/product/ots) is a fully managed NoSQL cloud database service that enables storage of a massive amount of structured\n", + "and semi-structured data.\n", + "\n", + "This notebook shows how to use functionality related to the `Tablestore` vector database.\n", + "\n", + "To use Tablestore, you must create an instance.\n", + "Here are the [creating instance instructions](https://help.aliyun.com/zh/tablestore/getting-started/manage-the-wide-column-model-in-the-tablestore-console)." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install llama-index-vector-stores-tablestore" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "os.environ[\"end_point\"] = getpass.getpass(\"Tablestore end_point:\")\n", + "os.environ[\"instance_name\"] = getpass.getpass(\"Tablestore instance_name:\")\n", + "os.environ[\"access_key_id\"] = getpass.getpass(\"Tablestore access_key_id:\")\n", + "os.environ[\"access_key_secret\"] = getpass.getpass(\n", + " \"Tablestore access_key_secret:\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create vector store. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "from llama_index.core import MockEmbedding\n", + "from llama_index.core.schema import TextNode\n", + "from llama_index.core.vector_stores import (\n", + " VectorStoreQuery,\n", + " MetadataFilters,\n", + " MetadataFilter,\n", + " FilterCondition,\n", + " FilterOperator,\n", + ")\n", + "from tablestore import FieldSchema, FieldType, VectorMetricType\n", + "\n", + "from llama_index.vector_stores.tablestore import TablestoreVectorStore\n", + "\n", + "vector_dimension = 4\n", + "\n", + "store = TablestoreVectorStore(\n", + "    endpoint=os.getenv(\"end_point\"),\n", + "    instance_name=os.getenv(\"instance_name\"),\n", + "    access_key_id=os.getenv(\"access_key_id\"),\n", + "    access_key_secret=os.getenv(\"access_key_secret\"),\n", + "    vector_dimension=vector_dimension,\n", + "    vector_metric_type=VectorMetricType.VM_COSINE,\n", + "    # metadata mapping is used to filter non-vector fields.\n", + "    metadata_mappings=[\n", + "        FieldSchema(\n", + "            \"type\", FieldType.KEYWORD, index=True, enable_sort_and_agg=True\n", + "        ),\n", + "        FieldSchema(\n", + "            \"time\", FieldType.LONG, index=True, enable_sort_and_agg=True\n", + "        ),\n", + "    ],\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create table and index." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "store.create_table_if_not_exist()\n", + "store.create_search_index_if_not_exist()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a mock embedding for testing." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "embedder = MockEmbedding(vector_dimension)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Prepare some docs." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "texts = [\n", + "    TextNode(\n", + "        id_=\"1\",\n", + "        text=\"The lives of two mob hitmen, a boxer, a gangster and his wife, and a pair of diner bandits intertwine in four tales of violence and redemption.\",\n", + "        metadata={\"type\": \"a\", \"time\": 1995},\n", + "    ),\n", + "    TextNode(\n", + "        id_=\"2\",\n", + "        text=\"When the menace known as the Joker wreaks havoc and chaos on the people of Gotham, Batman must accept one of the greatest psychological and physical tests of his ability to fight injustice.\",\n", + "        metadata={\"type\": \"a\", \"time\": 1990},\n", + "    ),\n", + "    TextNode(\n", + "        id_=\"3\",\n", + "        text=\"An insomniac office worker and a devil-may-care soapmaker form an underground fight club that evolves into something much, much more.\",\n", + "        metadata={\"type\": \"a\", \"time\": 2009},\n", + "    ),\n", + "    TextNode(\n", + "        id_=\"4\",\n", + "        text=\"A thief who steals corporate secrets through the use of dream-sharing technology is given the inverse task of planting an idea into the mind of a C.E.O.\",\n", + "        metadata={\"type\": \"a\", \"time\": 2023},\n", + "    ),\n", + "    TextNode(\n", + "        id_=\"5\",\n", + "        text=\"A computer hacker learns from mysterious rebels about the true nature of his reality and his role in the war against its controllers.\",\n", + "        metadata={\"type\": \"b\", \"time\": 2018},\n", + "    ),\n", + "    TextNode(\n", + "        id_=\"6\",\n", + "        text=\"Two detectives, a rookie and a veteran, hunt a serial killer who uses the seven deadly sins as his motives.\",\n", + "        metadata={\"type\": \"c\", \"time\": 2010},\n", + "    ),\n", + "    TextNode(\n", + "        id_=\"7\",\n", + "        text=\"An organized crime dynasty's aging patriarch transfers control of his clandestine empire to his reluctant son.\",\n", + "        metadata={\"type\": \"a\", \"time\": 2023},\n", + "    ),\n", + "]\n", + "for t in texts:\n", + "    t.embedding = embedder.get_text_embedding(t.text)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Write some docs." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['1', '2', '3', '4', '5', '6', '7']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "store.add(texts)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Delete docs." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "store.delete(\"1\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Query with filters." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "VectorStoreQueryResult(nodes=[TextNode(id_='2', embedding=[0.5, 0.5, 0.5, 0.5], metadata={'time': 1990, 'type': 'a'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, text='When the menace known as the Joker wreaks havoc and chaos on the people of Gotham, Batman must accept one of the greatest psychological and physical tests of his ability to fight injustice.', mimetype='text/plain', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n'), TextNode(id_='3', embedding=[0.5, 0.5, 0.5, 0.5], metadata={'time': 2009, 'type': 'a'}, excluded_embed_metadata_keys=[], excluded_llm_metadata_keys=[], relationships={}, text='An insomniac office worker and a devil-may-care soapmaker form an underground fight club that evolves into something much, much more.', mimetype='text/plain', start_char_idx=None, end_char_idx=None, text_template='{metadata_str}\\n\\n{content}', metadata_template='{key}: {value}', metadata_seperator='\\n')], similarities=None, ids=['2', '3'])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "store.query(\n", + " query=VectorStoreQuery(\n", + " query_embedding=embedder.get_text_embedding(\"nature fight physical\"),\n", + " similarity_top_k=5,\n", + " filters=MetadataFilters(\n", + " filters=[\n", + " MetadataFilter(\n", + " key=\"type\", value=\"a\", operator=FilterOperator.EQ\n", + " ),\n", + " MetadataFilter(\n", + " key=\"time\", value=2020, operator=FilterOperator.LTE\n", + " ),\n", + " ],\n", + " condition=FilterCondition.AND,\n", + " ),\n", + " ),\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/examples/vector_stores/VespaIndexDemo.ipynb b/docs/docs/examples/vector_stores/VespaIndexDemo.ipynb index 9d3c3e59fd194..c180152a2b21b 100644 --- a/docs/docs/examples/vector_stores/VespaIndexDemo.ipynb +++ b/docs/docs/examples/vector_stores/VespaIndexDemo.ipynb @@ -18,7 +18,7 @@ "id": "23cf319b", "metadata": {}, "source": [ - "\"Open\n" + "\"Open\n" ] }, { diff --git a/docs/docs/vector_stores/wordlift_vector_store_demo.ipynb b/docs/docs/examples/vector_stores/WordLiftDemo.ipynb similarity index 100% rename from docs/docs/vector_stores/wordlift_vector_store_demo.ipynb rename to docs/docs/examples/vector_stores/WordLiftDemo.ipynb diff --git a/docs/docs/examples/workflow/JSONalyze_query_engine.ipynb b/docs/docs/examples/workflow/JSONalyze_query_engine.ipynb index cfef1ffc8f75e..17ee27b7d458d 100644 --- a/docs/docs/examples/workflow/JSONalyze_query_engine.ipynb +++ b/docs/docs/examples/workflow/JSONalyze_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -222,8 +222,8 @@ "\n", " raise ImportError(IMPORT_ERROR_MSG) from exc\n", "\n", - " ctx.data[\"query\"] = ev.get(\"query\")\n", - " ctx.data[\"llm\"] = ev.get(\"llm\")\n", + " await ctx.set(\"query\", ev.get(\"query\"))\n", + " await ctx.set(\"llm\", 
ev.get(\"llm\"))\n", "\n", " query = ev.get(\"query\")\n", " table_name = ev.get(\"table_name\")\n", @@ -273,8 +273,8 @@ " self, ctx: Context, ev: JsonAnalyzerEvent\n", " ) -> StopEvent:\n", " \"\"\"Synthesize the response.\"\"\"\n", - " llm = ctx.data.get(\"llm\")\n", - " query = ctx.data.get(\"query\")\n", + " llm = await ctx.get(\"llm\", default=None)\n", + " query = await ctx.get(\"query\", default=None)\n", "\n", " response_str = llm.predict(\n", " DEFAULT_RESPONSE_SYNTHESIS_PROMPT,\n", diff --git a/docs/docs/examples/workflow/advanced_text_to_sql.ipynb b/docs/docs/examples/workflow/advanced_text_to_sql.ipynb new file mode 100644 index 0000000000000..23ee38209f9a8 --- /dev/null +++ b/docs/docs/examples/workflow/advanced_text_to_sql.ipynb @@ -0,0 +1,1289 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "5fb96106-e662-4a0f-9673-843c685c111e", + "metadata": {}, + "source": [ + "# Workflows for Advanced Text-to-SQL\n", + "\n", + "\"Open\n", + "\n", + "In this guide we show you how to setup a text-to-SQL workflow over your data with our [workflows](https://docs.llamaindex.ai/en/stable/module_guides/workflow/#workflows) syntax.\n", + "\n", + "This gives you flexibility to enhance text-to-SQL with additional techniques. We show these in the below sections: \n", + "1. **Query-Time Table Retrieval**: Dynamically retrieve relevant tables in the text-to-SQL prompt.\n", + "2. **Query-Time Sample Row retrieval**: Embed/Index each row, and dynamically retrieve example rows for each table in the text-to-SQL prompt.\n", + "\n", + "Our out-of-the box workflows include our `NLSQLTableQueryEngine` and `SQLTableRetrieverQueryEngine`. (if you want to check out our text-to-SQL guide using these modules, take a look [here](https://docs.llamaindex.ai/en/stable/examples/index_structs/struct_indices/SQLIndexDemo.html)). This guide implements an advanced version of those modules, giving you the utmost flexibility to apply this to your own setting.\n", + "\n", + "**NOTE:** Any Text-to-SQL application should be aware that executing \n", + "arbitrary SQL queries can be a security risk. It is recommended to\n", + "take precautions as needed, such as using restricted roles, read-only\n", + "databases, sandboxing, etc." + ] + }, + { + "cell_type": "markdown", + "id": "d5c0cbbb-0d20-4233-b2f6-9e8ff3f6d04c", + "metadata": {}, + "source": [ + "## Load and Ingest Data\n", + "\n", + "\n", + "### Load Data\n", + "We use the [WikiTableQuestions dataset](https://ppasupat.github.io/WikiTableQuestions/) (Pasupat and Liang 2015) as our test dataset.\n", + "\n", + "We go through all the csv's in one folder, store each in a sqlite database (we will then build an object index over each table schema)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dae8fb42-d65a-456c-b5bf-613efa43e9f1", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install llama-index-llms-openai" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "468a4900-a1ba-471d-a839-8cf56dfb5cf1", + "metadata": {}, + "outputs": [], + "source": [ + "!wget \"https://github.com/ppasupat/WikiTableQuestions/releases/download/v1.0.2/WikiTableQuestions-1.0.2-compact.zip\" -O data.zip\n", + "!unzip data.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cba67be2-343b-4acd-b128-b26a79bf4504", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "from pathlib import Path\n", + "\n", + "data_dir = Path(\"./WikiTableQuestions/csv/200-csv\")\n", + "csv_files = sorted([f for f in data_dir.glob(\"*.csv\")])\n", + "dfs = []\n", + "for csv_file in csv_files:\n", + " print(f\"processing file: {csv_file}\")\n", + " try:\n", + " df = pd.read_csv(csv_file)\n", + " dfs.append(df)\n", + " except Exception as e:\n", + " print(f\"Error parsing {csv_file}: {str(e)}\")" + ] + }, + { + "cell_type": "markdown", + "id": "2bf32f7b-dad2-4aca-88b1-1ec0bc221ebb", + "metadata": {}, + "source": [ + "### Extract Table Name and Summary from each Table\n", + "\n", + "Here we use gpt-4o-mini to extract a table name (with underscores) and summary from each table with our Pydantic program." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0c3040ec-9523-492b-870b-754feffca0c7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mkdir: WikiTableQuestions_TableInfo: File exists\n" + ] + } + ], + "source": [ + "tableinfo_dir = \"WikiTableQuestions_TableInfo\"\n", + "!mkdir {tableinfo_dir}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e921b5d0-662b-4991-b904-090f6cf3eb8c", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.prompts import ChatPromptTemplate\n", + "from llama_index.core.bridge.pydantic import BaseModel, Field\n", + "from llama_index.llms.openai import OpenAI\n", + "from llama_index.core.llms import ChatMessage\n", + "\n", + "\n", + "class TableInfo(BaseModel):\n", + " \"\"\"Information regarding a structured table.\"\"\"\n", + "\n", + " table_name: str = Field(\n", + " ..., description=\"table name (must be underscores and NO spaces)\"\n", + " )\n", + " table_summary: str = Field(\n", + " ..., description=\"short, concise summary/caption of the table\"\n", + " )\n", + "\n", + "\n", + "prompt_str = \"\"\"\\\n", + "Give me a summary of the table with the following JSON format.\n", + "\n", + "- The table name must be unique to the table and describe it while being concise. \n", + "- Do NOT output a generic table name (e.g. 
table, my_table).\n", + "\n", + "Do NOT make the table name one of the following: {exclude_table_name_list}\n", + "\n", + "Table:\n", + "{table_str}\n", + "\n", + "Summary: \"\"\"\n", + "prompt_tmpl = ChatPromptTemplate(\n", + "    message_templates=[ChatMessage.from_str(prompt_str, role=\"user\")]\n", + ")\n", + "\n", + "llm = OpenAI(model=\"gpt-4o-mini\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15277c5d-9a12-4ae9-b7ef-de58139ea66b", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "\n", + "def _get_tableinfo_with_index(idx: int) -> str:\n", + "    results_gen = Path(tableinfo_dir).glob(f\"{idx}_*\")\n", + "    results_list = list(results_gen)\n", + "    if len(results_list) == 0:\n", + "        return None\n", + "    elif len(results_list) == 1:\n", + "        path = results_list[0]\n", + "        return TableInfo.parse_file(path)\n", + "    else:\n", + "        raise ValueError(\n", + "            f\"More than one file matching index: {results_list}\"\n", + "        )\n", + "\n", + "\n", + "table_names = set()\n", + "table_infos = []\n", + "for idx, df in enumerate(dfs):\n", + "    table_info = _get_tableinfo_with_index(idx)\n", + "    if table_info:\n", + "        table_infos.append(table_info)\n", + "    else:\n", + "        while True:\n", + "            df_str = df.head(10).to_csv()\n", + "            table_info = llm.structured_predict(\n", + "                TableInfo,\n", + "                prompt_tmpl,\n", + "                table_str=df_str,\n", + "                exclude_table_name_list=str(list(table_names)),\n", + "            )\n", + "            table_name = table_info.table_name\n", + "            print(f\"Processed table: {table_name}\")\n", + "            if table_name not in table_names:\n", + "                table_names.add(table_name)\n", + "                break\n", + "            else:\n", + "                # try again\n", + "                print(f\"Table name {table_name} already exists, trying again.\")\n", + "                pass\n", + "\n", + "        out_file = f\"{tableinfo_dir}/{idx}_{table_name}.json\"\n", + "        json.dump(table_info.dict(), open(out_file, \"w\"))\n", + "        table_infos.append(table_info)" + ] + }, + { + "cell_type": "markdown", + "id": "3f67987b-6906-42e0-b80c-3ba096b71b42", + "metadata": {}, + "source": [ + "### Put Data in SQL Database\n", + "\n", + "We use `sqlalchemy`, a popular SQL database toolkit, to load all the tables." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d315556-e83a-4181-935a-1f067b12052a", + "metadata": {}, + "outputs": [], + "source": [ + "# put data into sqlite db\n", + "from sqlalchemy import (\n", + "    create_engine,\n", + "    MetaData,\n", + "    Table,\n", + "    Column,\n", + "    String,\n", + "    Integer,\n", + ")\n", + "import re\n", + "\n", + "\n", + "# Function to create a sanitized column name\n", + "def sanitize_column_name(col_name):\n", + "    # Remove special characters and replace spaces with underscores\n", + "    return re.sub(r\"\\W+\", \"_\", col_name)\n", + "\n", + "\n", + "# Function to create a table from a DataFrame using SQLAlchemy\n", + "def create_table_from_dataframe(\n", + "    df: pd.DataFrame, table_name: str, engine, metadata_obj\n", + "):\n", + "    # Sanitize column names\n", + "    sanitized_columns = {col: sanitize_column_name(col) for col in df.columns}\n", + "    df = df.rename(columns=sanitized_columns)\n", + "\n", + "    # Dynamically create columns based on DataFrame columns and data types\n", + "    columns = [\n", + "        Column(col, String if dtype == \"object\" else Integer)\n", + "        for col, dtype in zip(df.columns, df.dtypes)\n", + "    ]\n", + "\n", + "    # Create a table with the defined columns\n", + "    table = Table(table_name, metadata_obj, *columns)\n", + "\n", + "    # Create the table in the database\n", + "    metadata_obj.create_all(engine)\n", + "\n", + "    # Insert data from DataFrame into the table\n", + "    with engine.connect() as conn:\n", + "        for _, row in df.iterrows():\n", + "            insert_stmt = table.insert().values(**row.to_dict())\n", + "            conn.execute(insert_stmt)\n", + "        conn.commit()\n", + "\n", + "\n", + "# engine = create_engine(\"sqlite:///:memory:\")\n", + "engine = create_engine(\"sqlite:///wiki_table_questions.db\")\n", + "metadata_obj = MetaData()\n", + "for idx, df in enumerate(dfs):\n", + "    tableinfo = _get_tableinfo_with_index(idx)\n", + "    print(f\"Creating table: {tableinfo.table_name}\")\n", + "    create_table_from_dataframe(df, tableinfo.table_name, engine, metadata_obj)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "982aa74e-323f-4c85-b0e2-1acf91d6b841", + "metadata": {}, + "outputs": [], + "source": [ + "# # setup Arize Phoenix for logging/observability\n", + "# import phoenix as px\n", + "# import llama_index.core\n", + "\n", + "# px.launch_app()\n", + "# llama_index.core.set_global_handler(\"arize_phoenix\")" + ] + }, + { + "cell_type": "markdown", + "id": "f5492e07-234e-4b08-bf37-e4fac422927b", + "metadata": {}, + "source": [ + "## Advanced Capability 1: Text-to-SQL with Query-Time Table Retrieval\n", + "\n", + "We now show you how to set up an end-to-end text-to-SQL workflow with table retrieval.\n", + "\n", + "### Define Modules\n", + "\n", + "Here we define the core modules.\n", + "1. Object index + retriever to store table schemas\n", + "2. SQLDatabase object to connect to the above tables + SQLRetriever\n", + "3. Text-to-SQL Prompt\n", + "4. Response Synthesis Prompt\n", + "5. 
LLM" + ] + }, + { + "cell_type": "markdown", + "id": "926c03f4-0934-4e47-a11f-a7722e66feec", + "metadata": {}, + "source": [ + "Object index, retriever, SQLDatabase" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "709f0692-ddd1-4ff6-975a-1df5abfea1ab", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.objects import (\n", + " SQLTableNodeMapping,\n", + " ObjectIndex,\n", + " SQLTableSchema,\n", + ")\n", + "from llama_index.core import SQLDatabase, VectorStoreIndex\n", + "\n", + "sql_database = SQLDatabase(engine)\n", + "\n", + "table_node_mapping = SQLTableNodeMapping(sql_database)\n", + "table_schema_objs = [\n", + " SQLTableSchema(table_name=t.table_name, context_str=t.table_summary)\n", + " for t in table_infos\n", + "] # add a SQLTableSchema for each table\n", + "\n", + "obj_index = ObjectIndex.from_objects(\n", + " table_schema_objs,\n", + " table_node_mapping,\n", + " VectorStoreIndex,\n", + ")\n", + "obj_retriever = obj_index.as_retriever(similarity_top_k=3)" + ] + }, + { + "cell_type": "markdown", + "id": "cf8ce73a-5f21-4977-87ee-7752da4fed63", + "metadata": {}, + "source": [ + "SQLRetriever + Table Parser" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3047347e-362e-46c2-9ac8-ee3b8730fe3c", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.retrievers import SQLRetriever\n", + "from typing import List\n", + "\n", + "sql_retriever = SQLRetriever(sql_database)\n", + "\n", + "\n", + "def get_table_context_str(table_schema_objs: List[SQLTableSchema]):\n", + " \"\"\"Get table context string.\"\"\"\n", + " context_strs = []\n", + " for table_schema_obj in table_schema_objs:\n", + " table_info = sql_database.get_single_table_info(\n", + " table_schema_obj.table_name\n", + " )\n", + " if table_schema_obj.context_str:\n", + " table_opt_context = \" The table description is: \"\n", + " table_opt_context += table_schema_obj.context_str\n", + " table_info += table_opt_context\n", + "\n", + " context_strs.append(table_info)\n", + " return \"\\n\\n\".join(context_strs)" + ] + }, + { + "cell_type": "markdown", + "id": "d5e693fd-18b4-4d3e-9877-7ca527d83f57", + "metadata": {}, + "source": [ + "Text-to-SQL Prompt + Output Parser" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5c708fba-0551-4e14-938d-5c32eae52595", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Given an input question, first create a syntactically correct {dialect} query to run, then look at the results of the query and return the answer. You can order the results by a relevant column to return the most interesting examples in the database.\n", + "\n", + "Never query for all the columns from a specific table, only ask for a few relevant columns given the question.\n", + "\n", + "Pay attention to use only the column names that you can see in the schema description. Be careful to not query for columns that do not exist. Pay attention to which column is in which table. Also, qualify column names with the table name when needed. 
You are required to use the following format, each taking one line:\n", + "\n", + "Question: Question here\n", + "SQLQuery: SQL Query to run\n", + "SQLResult: Result of the SQLQuery\n", + "Answer: Final answer here\n", + "\n", + "Only use tables listed below.\n", + "{schema}\n", + "\n", + "Question: {query_str}\n", + "SQLQuery: \n" + ] + } + ], + "source": [ + "from llama_index.core.prompts.default_prompts import DEFAULT_TEXT_TO_SQL_PROMPT\n", + "from llama_index.core import PromptTemplate\n", + "from llama_index.core.llms import ChatResponse\n", + "\n", + "\n", + "def parse_response_to_sql(chat_response: ChatResponse) -> str:\n", + " \"\"\"Parse response to SQL.\"\"\"\n", + " response = chat_response.message.content\n", + " sql_query_start = response.find(\"SQLQuery:\")\n", + " if sql_query_start != -1:\n", + " response = response[sql_query_start:]\n", + " # TODO: move to removeprefix after Python 3.9+\n", + " if response.startswith(\"SQLQuery:\"):\n", + " response = response[len(\"SQLQuery:\") :]\n", + " sql_result_start = response.find(\"SQLResult:\")\n", + " if sql_result_start != -1:\n", + " response = response[:sql_result_start]\n", + " return response.strip().strip(\"```\").strip()\n", + "\n", + "\n", + "text2sql_prompt = DEFAULT_TEXT_TO_SQL_PROMPT.partial_format(\n", + " dialect=engine.dialect.name\n", + ")\n", + "print(text2sql_prompt.template)" + ] + }, + { + "cell_type": "markdown", + "id": "bf67d6a9-f94c-4c27-b09b-2dbe1952727d", + "metadata": {}, + "source": [ + "Response Synthesis Prompt" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e1757f33-f1ca-4a31-8cce-36be9bb9a989", + "metadata": {}, + "outputs": [], + "source": [ + "response_synthesis_prompt_str = (\n", + " \"Given an input question, synthesize a response from the query results.\\n\"\n", + " \"Query: {query_str}\\n\"\n", + " \"SQL: {sql_query}\\n\"\n", + " \"SQL Response: {context_str}\\n\"\n", + " \"Response: \"\n", + ")\n", + "response_synthesis_prompt = PromptTemplate(\n", + " response_synthesis_prompt_str,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "95b197ba-9c26-4e6f-9300-0e90a474911c", + "metadata": {}, + "outputs": [], + "source": [ + "# llm = OpenAI(model=\"gpt-3.5-turbo\")\n", + "llm = OpenAI(model=\"gpt-4o-mini\")" + ] + }, + { + "cell_type": "markdown", + "id": "655a23f3-06e3-45cf-9988-bf1cf5c4a5ad", + "metadata": {}, + "source": [ + "### Define Workflow\n", + "\n", + "Now that the components are in place, let's define the full workflow! 
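(Since `parse_response_to_sql` above does plain string surgery on the LLM output, a quick sanity check on a canned response is cheap insurance before wiring it into the workflow. A hypothetical test, using `ChatMessage`/`ChatResponse` from `llama_index.core.llms` as imported earlier:)

```python
# Hypothetical sanity check for parse_response_to_sql (defined above).
from llama_index.core.llms import ChatMessage, ChatResponse

mock_response = ChatResponse(
    message=ChatMessage(
        role="assistant",
        content=(
            "SQLQuery: SELECT Act, Year_signed FROM bad_boy_artists_album_release_summary\n"
            "SQLResult: ('The Notorious B.I.G', 1993)"
        ),
    )
)
# Only the text between "SQLQuery:" and "SQLResult:" should survive parsing.
assert (
    parse_response_to_sql(mock_response)
    == "SELECT Act, Year_signed FROM bad_boy_artists_album_release_summary"
)
```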
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3f3b808c-df2a-4bc1-89a3-ba75f6377f68", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.workflow import (\n", + " Workflow,\n", + " StartEvent,\n", + " StopEvent,\n", + " step,\n", + " Context,\n", + " Event,\n", + ")\n", + "\n", + "\n", + "class TableRetrieveEvent(Event):\n", + " \"\"\"Result of running table retrieval.\"\"\"\n", + "\n", + " table_context_str: str\n", + " query: str\n", + "\n", + "\n", + "class TextToSQLEvent(Event):\n", + " \"\"\"Text-to-SQL event.\"\"\"\n", + "\n", + " sql: str\n", + " query: str\n", + "\n", + "\n", + "class TextToSQLWorkflow1(Workflow):\n", + " \"\"\"Text-to-SQL Workflow that does query-time table retrieval.\"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " obj_retriever,\n", + " text2sql_prompt,\n", + " sql_retriever,\n", + " response_synthesis_prompt,\n", + " llm,\n", + " *args,\n", + " **kwargs\n", + " ) -> None:\n", + " \"\"\"Init params.\"\"\"\n", + " super().__init__(*args, **kwargs)\n", + " self.obj_retriever = obj_retriever\n", + " self.text2sql_prompt = text2sql_prompt\n", + " self.sql_retriever = sql_retriever\n", + " self.response_synthesis_prompt = response_synthesis_prompt\n", + " self.llm = llm\n", + "\n", + " @step\n", + " def retrieve_tables(\n", + " self, ctx: Context, ev: StartEvent\n", + " ) -> TableRetrieveEvent:\n", + " \"\"\"Retrieve tables.\"\"\"\n", + " table_schema_objs = self.obj_retriever.retrieve(ev.query)\n", + " table_context_str = get_table_context_str(table_schema_objs)\n", + " return TableRetrieveEvent(\n", + " table_context_str=table_context_str, query=ev.query\n", + " )\n", + "\n", + " @step\n", + " def generate_sql(\n", + " self, ctx: Context, ev: TableRetrieveEvent\n", + " ) -> TextToSQLEvent:\n", + " \"\"\"Generate SQL statement.\"\"\"\n", + " fmt_messages = self.text2sql_prompt.format_messages(\n", + " query_str=ev.query, schema=ev.table_context_str\n", + " )\n", + " chat_response = self.llm.chat(fmt_messages)\n", + " sql = parse_response_to_sql(chat_response)\n", + " return TextToSQLEvent(sql=sql, query=ev.query)\n", + "\n", + " @step\n", + " def generate_response(self, ctx: Context, ev: TextToSQLEvent) -> StopEvent:\n", + " \"\"\"Run SQL retrieval and generate response.\"\"\"\n", + " retrieved_rows = self.sql_retriever.retrieve(ev.sql)\n", + " fmt_messages = self.response_synthesis_prompt.format_messages(\n", + " sql_query=ev.sql,\n", + " context_str=str(retrieved_rows),\n", + " query_str=ev.query,\n", + " )\n", + " chat_response = llm.chat(fmt_messages)\n", + " return StopEvent(result=chat_response)" + ] + }, + { + "cell_type": "markdown", + "id": "2922dc47-1f63-4f6a-a795-c3b3e60fd354", + "metadata": {}, + "source": [ + "### Visualize Workflow\n", + "\n", + "A really nice property of workflows is that you can both visualize the execution graph as well as a trace of the most recent execution." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ac1a1bc-7cc6-49e7-a7cd-31ca9e347e5b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "text_to_sql_table_retrieval.html\n" + ] + } + ], + "source": [ + "from llama_index.utils.workflow import draw_all_possible_flows\n", + "\n", + "draw_all_possible_flows(\n", + " TextToSQLWorkflow1, filename=\"text_to_sql_table_retrieval.html\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "74f3aaca-59b0-478e-9652-7ccddd1cb4d4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "<!-- embedded pyvis HTML for the workflow graph; payload omitted -->" + ], + "text/plain": [ + "<IPython.core.display.HTML object>" + ], + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from IPython.display import display, HTML\n", + "\n", + "# Read the contents of the HTML file\n", + "with open(\"text_to_sql_table_retrieval.html\", \"r\") as file:\n", + " html_content = file.read()\n", + "\n", + "# Display the HTML content\n", + "display(HTML(html_content))" + ] + }, + { + "cell_type": "markdown", + "id": "1e3743fc-1c75-4f6b-a97c-d95182384f58", + "metadata": {}, + "source": [ + "### Run Some Queries!\n", + "\n", + "Now we're ready to run some queries across this entire workflow." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "87f17e5c-c0d3-4a82-b451-f532bbeee4c9", + "metadata": {}, + "outputs": [], + "source": [ + "workflow = TextToSQLWorkflow1(\n", + " obj_retriever,\n", + " text2sql_prompt,\n", + " sql_retriever,\n", + " response_synthesis_prompt,\n", + " llm,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61b4c13a-16c7-41f7-a3b9-8dd77831647a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running step retrieve_tables\n", + "Step retrieve_tables produced event TableRetrieveEvent\n", + "Running step generate_sql\n", + "Step generate_sql produced event TextToSQLEvent\n", + "Running step generate_response\n", + "Step generate_response produced event StopEvent\n", + "assistant: The Notorious B.I.G was signed to Bad Boy Records in 1993.\n", + "VERBOSE: True\n", + "> Table Info: Table 'bad_boy_artists_album_release_summary' has columns: Act (VARCHAR), Year_signed (INTEGER), _Albums_released_under_Bad_Boy (VARCHAR), . The table description is: A summary of artists signed to Bad Boy Records along with the year they were signed and the number of albums they released.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "('The Notorious B.I.G', 1993, '5')\n", + "\n", + "> Table Info: Table 'filmography_of_diane_drummond' has columns: Year (INTEGER), Title (VARCHAR), Role (VARCHAR), Notes (VARCHAR), . The table description is: A list of film and television roles played by Diane Drummond from 1995 to 2001.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "(2013, 'L.A. Slasher', 'The Actress', None)\n", + "\n", + "> Table Info: Table 'progressive_rock_album_chart_positions' has columns: Year (INTEGER), Title (VARCHAR), Chart_Positions_UK (VARCHAR), Chart_Positions_US (VARCHAR), Chart_Positions_NL (VARCHAR), Comments (VARCHAR), . The table description is: Chart positions of progressive rock albums in the UK, US, and NL from 1969 to 1981.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "(1977, 'Novella', '–', '46', '–', '1977 (January in US, August in UK, as the band moved to the Warner Bros Music Group)')\n", + "\n", + "VERBOSE: True\n", + "> Table Info: Table 'bad_boy_artists_album_release_summary' has columns: Act (VARCHAR), Year_signed (INTEGER), _Albums_released_under_Bad_Boy (VARCHAR), . 
The table description is: A summary of artists signed to Bad Boy Records along with the year they were signed and the number of albums they released.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "('The Notorious B.I.G', 1993, '5')\n", + "\n", + "> Table Info: Table 'filmography_of_diane_drummond' has columns: Year (INTEGER), Title (VARCHAR), Role (VARCHAR), Notes (VARCHAR), . The table description is: A list of film and television roles played by Diane Drummond from 1995 to 2001.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "(2013, 'L.A. Slasher', 'The Actress', None)\n", + "\n", + "> Table Info: Table 'progressive_rock_album_chart_positions' has columns: Year (INTEGER), Title (VARCHAR), Chart_Positions_UK (VARCHAR), Chart_Positions_US (VARCHAR), Chart_Positions_NL (VARCHAR), Comments (VARCHAR), . The table description is: Chart positions of progressive rock albums in the UK, US, and NL from 1969 to 1981.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "(1977, 'Novella', '–', '46', '–', '1977 (January in US, August in UK, as the band moved to the Warner Bros Music Group)')\n", + "\n" + ] + } + ], + "source": [ + "response = await workflow.run(\n", + " query=\"What was the year that The Notorious B.I.G was signed to Bad Boy?\"\n", + ")\n", + "print(str(response))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d546d22-1326-406d-9050-7a8ec11d42d4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running step retrieve_tables\n", + "Step retrieve_tables produced event TableRetrieveEvent\n", + "Running step generate_sql\n", + "Step generate_sql produced event TextToSQLEvent\n", + "Running step generate_response\n", + "Step generate_response produced event StopEvent\n", + "assistant: William Friedkin won the Best Director award at the 1972 Academy Awards.\n" + ] + } + ], + "source": [ + "response = await workflow.run(\n", + " query=\"Who won best director in the 1972 academy awards\"\n", + ")\n", + "print(str(response))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e9ed0f49-a2ed-4a3f-9ec7-bd2562323534", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running step retrieve_tables\n", + "Step retrieve_tables produced event TableRetrieveEvent\n", + "Running step generate_sql\n", + "Step generate_sql produced event TextToSQLEvent\n", + "Running step generate_response\n", + "Step generate_response produced event StopEvent\n", + "assistant: Pasquale Preziosa has been serving since 25 February 2013 and is currently in office as the incumbent.\n" + ] + } + ], + "source": [ + "response = await workflow.run(query=\"What was the term of Pasquale Preziosa?\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "id": "02ce9fb2-3799-47a7-b170-eee83f0037d5", + "metadata": {}, + "source": [ + "## Advanced Capability 2: Text-to-SQL with Query-Time Row Retrieval (along with Table Retrieval)\n", + "\n", + "One problem in the previous example is that if the user asks about \"The Notorious BIG\" but the artist is stored as \"The Notorious B.I.G\", then the generated SELECT statement will likely not return any matches.\n", + "\n", + "We can alleviate this problem by fetching a small number of example rows per table. A naive option would be to just take the first k rows. 
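(For concreteness, the naive variant is just a LIMIT query. A hypothetical sketch against the SQLAlchemy `engine` created earlier; `first_k_rows_str` is not part of the notebook:)

```python
# Hypothetical sketch of the naive option: pull the first k rows of a table
# with a plain LIMIT query (no retrieval). Shown only for contrast with the
# embedding-based approach described next; `engine` is the SQLAlchemy engine
# created earlier in this notebook.
from sqlalchemy import text


def first_k_rows_str(table_name: str, k: int = 3) -> str:
    with engine.connect() as conn:
        rows = conn.execute(
            text(f'SELECT * FROM "{table_name}" LIMIT :k'), {"k": k}
        ).fetchall()
    return "\n".join(str(tuple(r)) for r in rows)
```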
Instead, we embed, index, and retrieve k relevant rows given the user query to give the text-to-SQL LLM the most contextually relevant information for SQL generation.\n", + "\n", + "We now extend our workflow." + ] + }, + { + "cell_type": "markdown", + "id": "cbb5818c-22be-4ae2-882d-7aa0457f0f20", + "metadata": {}, + "source": [ + "### Index Each Table\n", + "\n", + "We embed/index the rows of each table, resulting in one index per table." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5e22ed2-60c2-4b5b-b3b4-71cb9a364ba5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Indexing rows in table: academy_awards_and_nominations_1972\n", + "Indexing rows in table: annual_traffic_accident_deaths\n", + "Indexing rows in table: bad_boy_artists_album_release_summary\n", + "Indexing rows in table: bbc_radio_services_cost_comparison_2012_2013\n", + "Indexing rows in table: binary_encoding_probabilities\n", + "Indexing rows in table: boxing_match_results_summary\n", + "Indexing rows in table: cancer_related_genes_and_functions\n", + "Indexing rows in table: diane_drummond_awards_nominations\n", + "Indexing rows in table: diane_drummond_oscar_nominations_and_wins\n", + "Indexing rows in table: diane_drummond_single_chart_performance\n", + "Indexing rows in table: euro_2020_group_stage_results\n", + "Indexing rows in table: experiment_drop_events_timeline\n", + "Indexing rows in table: filmography_of_diane_drummond\n", + "Indexing rows in table: grammy_awards_summary_for_wilco\n", + "Indexing rows in table: historical_college_football_records\n", + "Indexing rows in table: italian_ministers_term_dates\n", + "Indexing rows in table: kodachrome_film_types_and_dates\n", + "Indexing rows in table: missing_persons_case_summary\n", + "Indexing rows in table: monthly_climate_statistics\n", + "Indexing rows in table: monthly_climate_statistics_summary\n", + "Indexing rows in table: monthly_weather_statistics\n", + "Indexing rows in table: multilingual_greetings_and_phrases\n", + "Indexing rows in table: municipalities_merger_summary\n", + "Indexing rows in table: new_mexico_government_officials\n", + "Indexing rows in table: norwegian_club_performance_summary\n", + "Indexing rows in table: ohio_private_schools_summary\n", + "Indexing rows in table: progressive_rock_album_chart_positions\n", + "Indexing rows in table: regional_airports_usage_summary\n", + "Indexing rows in table: south_dakota_radio_stations\n", + "Indexing rows in table: triple_crown_winners_summary\n", + "Indexing rows in table: uk_ministers_and_titles_history\n", + "Indexing rows in table: voter_registration_status_by_party\n", + "Indexing rows in table: voter_registration_summary_by_party\n", + "Indexing rows in table: yamato_district_population_density\n" + ] + } + ], + "source": [ + "from llama_index.core import VectorStoreIndex, load_index_from_storage\n", + "from sqlalchemy import text\n", + "from llama_index.core.schema import TextNode\n", + "from llama_index.core import StorageContext\n", + "import os\n", + "from pathlib import Path\n", + "from typing import Dict\n", + "\n", + "\n", + "def index_all_tables(\n", + " sql_database: SQLDatabase, table_index_dir: str = \"table_index_dir\"\n", + ") -> Dict[str, VectorStoreIndex]:\n", + " \"\"\"Index all tables.\"\"\"\n", + " if not Path(table_index_dir).exists():\n", + " os.makedirs(table_index_dir)\n", + "\n", + " vector_index_dict = {}\n", + " engine = sql_database.engine\n", + " for table_name in 
sql_database.get_usable_table_names():\n", + " print(f\"Indexing rows in table: {table_name}\")\n", + " if not os.path.exists(f\"{table_index_dir}/{table_name}\"):\n", + " # get all rows from table\n", + " with engine.connect() as conn:\n", + " cursor = conn.execute(text(f'SELECT * FROM \"{table_name}\"'))\n", + " result = cursor.fetchall()\n", + " row_tups = []\n", + " for row in result:\n", + " row_tups.append(tuple(row))\n", + "\n", + " # index each row, put into vector store index\n", + " nodes = [TextNode(text=str(t)) for t in row_tups]\n", + "\n", + " # put into vector store index (uses OpenAIEmbedding by default)\n", + " index = VectorStoreIndex(nodes)\n", + "\n", + " # save index\n", + " index.set_index_id(\"vector_index\")\n", + " index.storage_context.persist(f\"{table_index_dir}/{table_name}\")\n", + " else:\n", + " # rebuild storage context\n", + " storage_context = StorageContext.from_defaults(\n", + " persist_dir=f\"{table_index_dir}/{table_name}\"\n", + " )\n", + " # load index\n", + " index = load_index_from_storage(\n", + " storage_context, index_id=\"vector_index\"\n", + " )\n", + " vector_index_dict[table_name] = index\n", + "\n", + " return vector_index_dict\n", + "\n", + "\n", + "vector_index_dict = index_all_tables(sql_database)" + ] + }, + { + "cell_type": "markdown", + "id": "379ab116-08b3-4ad6-83ad-32062893f3c8", + "metadata": {}, + "source": [ + "### Define Expanded Table Parsing\n", + "\n", + "We expand the capability of our table parsing to not only return the relevant table schemas, but also return relevant rows per table schema.\n", + "\n", + "It now takes in not only `table_schema_objs` (the output of the table retriever), but also the original `query_str`, which is then used for vector retrieval of relevant rows." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ee4d51a3-e88f-49dc-b75b-c7ce0ca770f9", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.retrievers import SQLRetriever\n", + "from typing import List\n", + "\n", + "sql_retriever = SQLRetriever(sql_database)\n", + "\n", + "\n", + "def get_table_context_and_rows_str(\n", + " query_str: str,\n", + " table_schema_objs: List[SQLTableSchema],\n", + " verbose: bool = False,\n", + "):\n", + " \"\"\"Get table context string.\"\"\"\n", + " context_strs = []\n", + " for table_schema_obj in table_schema_objs:\n", + " # first append table info + additional context\n", + " table_info = sql_database.get_single_table_info(\n", + " table_schema_obj.table_name\n", + " )\n", + " if table_schema_obj.context_str:\n", + " table_opt_context = \" The table description is: \"\n", + " table_opt_context += table_schema_obj.context_str\n", + " table_info += table_opt_context\n", + "\n", + " # also look up vector index to return relevant table rows\n", + " vector_retriever = vector_index_dict[\n", + " table_schema_obj.table_name\n", + " ].as_retriever(similarity_top_k=2)\n", + " relevant_nodes = vector_retriever.retrieve(query_str)\n", + " if len(relevant_nodes) > 0:\n", + " table_row_context = \"\\nHere are some relevant example rows (values in the same order as columns above)\\n\"\n", + " for node in relevant_nodes:\n", + " table_row_context += str(node.get_content()) + \"\\n\"\n", + " table_info += table_row_context\n", + "\n", + " if verbose:\n", + " print(f\"> Table Info: {table_info}\")\n", + "\n", + " context_strs.append(table_info)\n", + " return \"\\n\\n\".join(context_strs)" + ] + }, + { + "cell_type": "markdown", + "id": "b2c4b1d5-c2c5-439c-a1d4-28a9f31f81bc", + "metadata": {}, 
+ "source": [ + "### Define Expanded Workflow\n", + "\n", + "We re-use the workflow in section 1, but with an upgraded SQL parsing step after text-to-SQL generation.\n", + "\n", + "It is very easy to subclass and extend an existing workflow, and customizing existing steps to be more advanced. Here we define a new worfklow that overrides the existing `retrieve_tables` step in order to return the relevant rows." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1ad3191c-6f64-4798-b4f9-bde13230ab8a", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.workflow import (\n", + " Workflow,\n", + " StartEvent,\n", + " StopEvent,\n", + " step,\n", + " Context,\n", + " Event,\n", + ")\n", + "\n", + "\n", + "class TextToSQLWorkflow2(TextToSQLWorkflow1):\n", + " \"\"\"Text-to-SQL Workflow that does query-time row AND table retrieval.\"\"\"\n", + "\n", + " @step\n", + " def retrieve_tables(\n", + " self, ctx: Context, ev: StartEvent\n", + " ) -> TableRetrieveEvent:\n", + " \"\"\"Retrieve tables.\"\"\"\n", + " table_schema_objs = self.obj_retriever.retrieve(ev.query)\n", + " table_context_str = get_table_context_and_rows_str(\n", + " ev.query, table_schema_objs, verbose=self._verbose\n", + " )\n", + " return TableRetrieveEvent(\n", + " table_context_str=table_context_str, query=ev.query\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "8bf37b20-013c-47ea-9570-e4f98444557b", + "metadata": {}, + "source": [ + "Since the overall sequence of steps is the same, the graph should look the same." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc4451a1-b10f-4d30-9adb-333f623f56c1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "text_to_sql_table_retrieval.html\n" + ] + } + ], + "source": [ + "from llama_index.utils.workflow import draw_all_possible_flows\n", + "\n", + "draw_all_possible_flows(\n", + " TextToSQLWorkflow2, filename=\"text_to_sql_table_retrieval.html\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "892f1581-2307-47c8-90fc-e15d709c0e36", + "metadata": {}, + "source": [ + "### Run Some Queries\n", + "\n", + "We can now ask about relevant entries even if it doesn't exactly match the entry in the database." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "325aa74c-8a0d-4ec8-9bc7-89554f59e4d6", + "metadata": {}, + "outputs": [], + "source": [ + "workflow2 = TextToSQLWorkflow2(\n", + " obj_retriever,\n", + " text2sql_prompt,\n", + " sql_retriever,\n", + " response_synthesis_prompt,\n", + " llm,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48679514-22b1-4a9b-93bf-84979361c45c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running step retrieve_tables\n", + "VERBOSE: True\n", + "> Table Info: Table 'bad_boy_artists_album_release_summary' has columns: Act (VARCHAR), Year_signed (INTEGER), _Albums_released_under_Bad_Boy (VARCHAR), . The table description is: A summary of artists signed to Bad Boy Records along with the year they were signed and the number of albums they released.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "('The Notorious B.I.G', 1993, '5')\n", + "\n", + "> Table Info: Table 'filmography_of_diane_drummond' has columns: Year (INTEGER), Title (VARCHAR), Role (VARCHAR), Notes (VARCHAR), . 
The table description is: A list of film and television roles played by Diane Drummond from 1995 to 2001.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "(2013, 'L.A. Slasher', 'The Actress', None)\n", + "\n", + "> Table Info: Table 'progressive_rock_album_chart_positions' has columns: Year (INTEGER), Title (VARCHAR), Chart_Positions_UK (VARCHAR), Chart_Positions_US (VARCHAR), Chart_Positions_NL (VARCHAR), Comments (VARCHAR), . The table description is: Chart positions of progressive rock albums in the UK, US, and NL from 1969 to 1981.\n", + "Here are some relevant example rows (values in the same order as columns above)\n", + "(1977, 'Novella', '–', '46', '–', '1977 (January in US, August in UK, as the band moved to the Warner Bros Music Group)')\n", + "\n", + "Step retrieve_tables produced event TableRetrieveEvent\n", + "Running step generate_sql\n", + "Step generate_sql produced event TextToSQLEvent\n", + "Running step generate_response\n", + "Step generate_response produced event StopEvent\n", + "assistant: The Notorious B.I.G. was signed to Bad Boy Records in 1993.\n" + ] + } + ], + "source": [ + "response = await workflow2.run(\n", + " query=\"What was the year that The Notorious BIG was signed to Bad Boy?\"\n", + ")\n", + "print(str(response))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "llama_index_v3", + "language": "python", + "name": "llama_index_v3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/examples/workflow/citation_query_engine.ipynb b/docs/docs/examples/workflow/citation_query_engine.ipynb index cda23f6807b67..2090b82b3969a 100644 --- a/docs/docs/examples/workflow/citation_query_engine.ipynb +++ b/docs/docs/examples/workflow/citation_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -263,7 +263,7 @@ " print(f\"Query the database with: {query}\")\n", "\n", " # store the query in the global context\n", - " ctx.data[\"query\"] = query\n", + " await ctx.set(\"query\", query)\n", "\n", " if ev.index is None:\n", " print(\"Index is empty, load some documents before querying!\")\n", @@ -276,7 +276,7 @@ "\n", " @step\n", " async def create_citation_nodes(\n", - " self, ctx: Context, ev: RetrieverEvent\n", + " self, ev: RetrieverEvent\n", " ) -> CreateCitationsEvent:\n", " \"\"\"\n", " Modify retrieved nodes to create granular sources for citations.\n", @@ -323,7 +323,7 @@ " ) -> StopEvent:\n", " \"\"\"Return a streaming response using the retrieved nodes.\"\"\"\n", " llm = OpenAI(model=\"gpt-4o-mini\")\n", - " query = ctx.data.get(\"query\")\n", + " query = await ctx.get(\"query\", default=None)\n", "\n", " synthesizer = get_response_synthesizer(\n", " llm=llm,\n", diff --git a/docs/docs/examples/workflow/corrective_rag_pack.ipynb b/docs/docs/examples/workflow/corrective_rag_pack.ipynb index 2653662cbdc3c..73809573d51c5 100644 --- a/docs/docs/examples/workflow/corrective_rag_pack.ipynb +++ b/docs/docs/examples/workflow/corrective_rag_pack.ipynb @@ -44,7 +44,7 @@ "source": [ "import os\n", "\n", - "os.environ[\"OPENAI_API_KEY\"] = \"\"\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-proj-...\"\n", "tavily_ai_api_key = \"\"" ] }, @@ -94,10 +94,11 @@ "metadata": {}, "source": [ "The following 
events are needed:\n", - "1. `RetrieveEvent` - Event containing information about the retrieved nodes.\n", - "2. `RelevanceEvalEvent` - Event containing a list of the results of the relevance evaluation.\n", - "3. `TextExtractEvent` - Event containing the concatenated string of relevant text from relevant nodes.\n", - "4. `QueryEvent` - Event containing both the relevant text and search text." + "1. `PrepEvent` - Event signifying that the index and other objects are prepared.\n", + "2. `RetrieveEvent` - Event containing information about the retrieved nodes.\n", + "3. `RelevanceEvalEvent` - Event containing a list of the results of the relevance evaluation.\n", + "4. `TextExtractEvent` - Event containing the concatenated string of relevant text from relevant nodes.\n", + "5. `QueryEvent` - Event containing both the relevant text and search text." ] }, { @@ -110,6 +111,12 @@ "from llama_index.core.schema import NodeWithScore\n", "\n", "\n", + "class PrepEvent(Event):\n", + " \"\"\"Prep event (prepares for retrieval).\"\"\"\n", + "\n", + " pass\n", + "\n", + "\n", "class RetrieveEvent(Event):\n", " \"\"\"Retrieve event (gets retrieved nodes).\"\"\"\n", "\n", @@ -205,47 +212,70 @@ " async def ingest(self, ctx: Context, ev: StartEvent) -> StopEvent | None:\n", " \"\"\"Ingest step (for ingesting docs and initializing index).\"\"\"\n", " documents: list[Document] | None = ev.get(\"documents\")\n", - " tavily_ai_apikey: str | None = ev.get(\"tavily_ai_apikey\")\n", "\n", - " if any(i is None for i in [documents, tavily_ai_apikey]):\n", + " if documents is None:\n", + " return None\n", + "\n", + " index = VectorStoreIndex.from_documents(documents)\n", + "\n", + " return StopEvent(result=index)\n", + "\n", + " @step\n", + " async def prepare_for_retrieval(\n", + " self, ctx: Context, ev: StartEvent\n", + " ) -> PrepEvent | None:\n", + " \"\"\"Prepare for retrieval.\"\"\"\n", + "\n", + " query_str: str | None = ev.get(\"query_str\")\n", + " retriever_kwargs: dict | None = ev.get(\"retriever_kwargs\", {})\n", + "\n", + " if query_str is None:\n", " return None\n", "\n", + " tavily_ai_apikey: str | None = ev.get(\"tavily_ai_apikey\")\n", + " index = ev.get(\"index\")\n", + "\n", " llm = OpenAI(model=\"gpt-4\")\n", - " ctx.data[\"relevancy_pipeline\"] = QueryPipeline(\n", + " await ctx.set(\"relevancy_pipeline\", QueryPipeline(\n", " chain=[DEFAULT_RELEVANCY_PROMPT_TEMPLATE, llm]\n", - " )\n", - " ctx.data[\"transform_query_pipeline\"] = QueryPipeline(\n", + " ))\n", + " await ctx.set(\"transform_query_pipeline\", QueryPipeline(\n", " chain=[DEFAULT_TRANSFORM_QUERY_TEMPLATE, llm]\n", - " )\n", + " ))\n", "\n", - " ctx.data[\"llm\"] = llm\n", - " ctx.data[\"index\"] = VectorStoreIndex.from_documents(documents)\n", - " ctx.data[\"tavily_tool\"] = TavilyToolSpec(api_key=tavily_ai_apikey)\n", + " await ctx.set(\"llm\", llm)\n", + " await ctx.set(\"index\", index)\n", + " await ctx.set(\"tavily_tool\", TavilyToolSpec(api_key=tavily_ai_apikey))\n", "\n", - " return StopEvent()\n", + " await ctx.set(\"query_str\", query_str)\n", + " await ctx.set(\"retriever_kwargs\", retriever_kwargs)\n", + "\n", + " return PrepEvent()\n", "\n", " @step\n", " async def retrieve(\n", - " self, ctx: Context, ev: StartEvent\n", + " self, ctx: Context, ev: PrepEvent\n", " ) -> RetrieveEvent | None:\n", " \"\"\"Retrieve the relevant nodes for the query.\"\"\"\n", - " query_str = ev.get(\"query_str\")\n", - " retriever_kwargs = ev.get(\"retriever_kwargs\", {})\n", + " query_str = await ctx.get(\"query_str\")\n", + " 
retriever_kwargs = await ctx.get(\"retriever_kwargs\")\n", "\n", " if query_str is None:\n", " return None\n", "\n", - " if \"index\" not in ctx.data or \"tavily_tool\" not in ctx.data:\n", + " index = await ctx.get(\"index\", default=None)\n", + " tavily_tool = await ctx.get(\"tavily_tool\", default=None)\n", + " if index is None or tavily_tool is None:\n", " raise ValueError(\n", " \"Index and tavily tool must be constructed. Run with 'documents' and 'tavily_ai_apikey' params first.\"\n", " )\n", "\n", - " retriever: BaseRetriever = ctx.data[\"index\"].as_retriever(\n", + " retriever: BaseRetriever = index.as_retriever(\n", " **retriever_kwargs\n", " )\n", " result = retriever.retrieve(query_str)\n", - " ctx.data[\"retrieved_nodes\"] = result\n", - " ctx.data[\"query_str\"] = query_str\n", + " await ctx.set(\"retrieved_nodes\", result)\n", + " await ctx.set(\"query_str\", query_str)\n", " return RetrieveEvent(retrieved_nodes=result)\n", "\n", " @step\n", @@ -254,16 +284,16 @@ " ) -> RelevanceEvalEvent:\n", " \"\"\"Evaluate relevancy of retrieved documents with the query.\"\"\"\n", " retrieved_nodes = ev.retrieved_nodes\n", - " query_str = ctx.data[\"query_str\"]\n", + " query_str = await ctx.get(\"query_str\")\n", "\n", " relevancy_results = []\n", " for node in retrieved_nodes:\n", - " relevancy = ctx.data[\"relevancy_pipeline\"].run(\n", + " relevancy = (await ctx.get(\"relevancy_pipeline\")).run(\n", " context_str=node.text, query_str=query_str\n", " )\n", " relevancy_results.append(relevancy.message.content.lower().strip())\n", "\n", - " ctx.data[\"relevancy_results\"] = relevancy_results\n", + " await ctx.set(\"relevancy_results\", relevancy_results)\n", " return RelevanceEvalEvent(relevant_results=relevancy_results)\n", "\n", " @step\n", @@ -271,7 +301,7 @@ " self, ctx: Context, ev: RelevanceEvalEvent\n", " ) -> TextExtractEvent:\n", " \"\"\"Extract relevant texts from retrieved documents.\"\"\"\n", - " retrieved_nodes = ctx.data[\"retrieved_nodes\"]\n", + " retrieved_nodes = await ctx.get(\"retrieved_nodes\")\n", " relevancy_results = ev.relevant_results\n", "\n", " relevant_texts = [\n", @@ -289,18 +319,15 @@ " ) -> QueryEvent:\n", " \"\"\"Search the transformed query with Tavily API.\"\"\"\n", " relevant_text = ev.relevant_text\n", - " relevancy_results = ctx.data[\"relevancy_results\"]\n", - " query_str = ctx.data[\"query_str\"]\n", + " relevancy_results = await ctx.get(\"relevancy_results\")\n", + " query_str = await ctx.get(\"query_str\")\n", "\n", " # If any document is found irrelevant, transform the query string for better search results.\n", " if \"no\" in relevancy_results:\n", - " transformed_query_str = (\n", - " ctx.data[\"transform_query_pipeline\"]\n", - " .run(query_str=query_str)\n", - " .message.content\n", - " )\n", + " qp = await ctx.get(\"transform_query_pipeline\")\n", + " transformed_query_str = qp.run(query_str=query_str).message.content\n", " # Conduct a search with the transformed query string and collect the results.\n", - " search_results = ctx.data[\"tavily_tool\"].search(\n", + " search_results = (await ctx.get(\"tavily_tool\")).search(\n", " transformed_query_str, max_results=5\n", " )\n", " search_text = \"\\n\".join([result.text for result in search_results])\n", @@ -314,7 +341,7 @@ " \"\"\"Get result with relevant text.\"\"\"\n", " relevant_text = ev.relevant_text\n", " search_text = ev.search_text\n", - " query_str = ctx.data[\"query_str\"]\n", + " query_str = await ctx.get(\"query_str\")\n", "\n", " documents = [Document(text=relevant_text + \"\\n\" + 
search_text)]\n", " index = SummaryIndex.from_documents(documents)\n", @@ -340,7 +367,7 @@ "\n", "documents = SimpleDirectoryReader(\"./data\").load_data()\n", "workflow = CorrectiveRAGWorkflow()\n", - "await workflow.run(documents=documents, tavily_ai_apikey=tavily_ai_api_key)" + "index = await workflow.run(documents=documents)" ] }, { @@ -364,7 +391,11 @@ "source": [ "from IPython.display import Markdown, display\n", "\n", - "response = await workflow.run(query_str=\"How was Llama2 pretrained?\")\n", + "response = await workflow.run(\n", + " query_str=\"How was Llama2 pretrained?\",\n", + " index=index,\n", + " tavily_ai_apikey=tavily_ai_api_key,\n", + ")\n", "display(Markdown(str(response)))" ] }, diff --git a/docs/docs/examples/workflow/long_rag_pack.ipynb b/docs/docs/examples/workflow/long_rag_pack.ipynb index d8ba947a60e98..5995ed63fca4f 100644 --- a/docs/docs/examples/workflow/long_rag_pack.ipynb +++ b/docs/docs/examples/workflow/long_rag_pack.ipynb @@ -42,7 +42,7 @@ "source": [ "import os\n", "\n", - "os.environ[\"OPENAI_API_KEY\"] = \"\"" + "os.environ[\"OPENAI_API_KEY\"] = \"sk-proj-...\"" ] }, { @@ -205,10 +205,7 @@ "from llama_index.core.vector_stores.simple import BasePydanticVectorStore\n", "from llama_index.core.schema import QueryBundle, NodeWithScore\n", "from llama_index.core.vector_stores.types import VectorStoreQuery\n", - "from llama_index.core.settings import (\n", - " Settings,\n", - " embed_model_from_settings_or_context,\n", - ")\n", + "from llama_index.core.settings import Settings\n", "\n", "\n", "class LongRAGRetriever(BaseRetriever):\n", @@ -236,9 +233,7 @@ "\n", " self._similarity_top_k = similarity_top_k\n", " self._vec_store = vector_store\n", - " self._embed_model = embed_model_from_settings_or_context(\n", - " Settings, None\n", - " )\n", + " self._embed_model = Settings.embed_model\n", "\n", " def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:\n", " \"\"\"Retrieves.\n", @@ -436,7 +431,6 @@ " vector_store=ev.index.vector_store,\n", " )\n", " query_eng = RetrieverQueryEngine.from_args(retriever, ev.llm)\n", - " ctx.data[\"query_eng\"] = query_eng\n", "\n", " return StopEvent(\n", " result={\n", @@ -458,12 +452,11 @@ " StopEvent | None: stop event with result\n", " \"\"\"\n", " query_str: str | None = ev.get(\"query_str\")\n", + " query_eng = ev.get(\"query_eng\")\n", "\n", " if query_str is None:\n", " return None\n", "\n", - " query_eng: RetrieverQueryEngine = ctx.data.get(\"query_eng\")\n", - "\n", " result = query_eng.query(query_str)\n", " return StopEvent(result=result)" ] @@ -490,20 +483,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'retriever': <__main__.LongRAGRetriever at 0x13cb0e250>,\n", - " 'query_engine': ,\n", - " 'index': }" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "from llama_index.llms.openai import OpenAI\n", "\n", @@ -512,7 +492,7 @@ "data_dir = \"data\"\n", "\n", "# initialize the workflow\n", - "await wf.run(\n", + "result = await wf.run(\n", " data_dir=data_dir,\n", " llm=llm,\n", " chunk_size=DEFAULT_CHUNK_SIZE,\n", @@ -529,9 +509,9 @@ { "data": { "text/markdown": [ - "Pittsburgh can become a startup hub by leveraging its increasing population of young people, particularly those aged 25 to 29, who are crucial for the startup ecosystem. 
Encouraging the youth-driven food boom, preserving historic buildings, and making the city more bicycle and pedestrian-friendly are key steps. Additionally, Carnegie Mellon University (CMU) can contribute by focusing on being an even better research university and attracting top talent. Promoting tolerance and creating a supportive environment for startups are also essential.\n", + "Pittsburgh can become a startup hub by leveraging its increasing population of young people, particularly those aged 25 to 29, who are crucial for the startup ecosystem. The city should encourage the youth-driven food boom, preserve historic buildings, and make the city more bicycle and pedestrian-friendly. Additionally, Carnegie Mellon University (CMU) should focus on being an even better research university to attract ambitious talent. The city should also foster a culture of tolerance and gradually build an investor community.\n", "\n", - "There are two types of moderates: intentional moderates and accidental moderates. Intentional moderates deliberately choose positions midway between the extremes of right and left. Accidental moderates, on the other hand, make up their own minds about each issue, and their opinions average out to be moderate because they find the far right and far left to be roughly equally wrong." + "There are two types of moderates: intentional moderates and accidental moderates. Intentional moderates deliberately choose positions midway between the extremes of right and left, while accidental moderates form their opinions independently on each issue, resulting in a broad range of views that average to a moderate position." ], "text/plain": [ "" @@ -546,7 +526,8 @@ "\n", "# run a query\n", "res = await wf.run(\n", - " query_str=\"How can Pittsburgh become a startup hub, and what are the two types of moderates?\"\n", + " query_str=\"How can Pittsburgh become a startup hub, and what are the two types of moderates?\",\n", + " query_eng=result[\"query_engine\"],\n", ")\n", "display(Markdown(str(res)))" ] diff --git a/docs/docs/examples/workflow/multi_step_query_engine.ipynb b/docs/docs/examples/workflow/multi_step_query_engine.ipynb index 2029493ebeef8..c4cc343736d84 100644 --- a/docs/docs/examples/workflow/multi_step_query_engine.ipynb +++ b/docs/docs/examples/workflow/multi_step_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -206,7 +206,7 @@ " source_nodes = []\n", "\n", " query = ev.get(\"query\")\n", - " ctx.data[\"query\"] = ev.get(\"query\")\n", + " await ctx.set(\"query\", ev.get(\"query\"))\n", "\n", " llm = Settings.llm\n", " stop_fn = self.default_stop_fn\n", @@ -274,8 +274,9 @@ " ) -> StopEvent:\n", " \"\"\"Synthesize the response.\"\"\"\n", " response_synthesizer = get_response_synthesizer()\n", + " query = await ctx.get(\"query\", default=None)\n", " final_response = await response_synthesizer.asynthesize(\n", - " query=ctx.data.get(\"query\"),\n", + " query=query,\n", " nodes=ev.nodes,\n", " additional_source_nodes=ev.source_nodes,\n", " )\n", diff --git a/docs/docs/examples/workflow/multi_strategy_workflow.ipynb b/docs/docs/examples/workflow/multi_strategy_workflow.ipynb index 1c0f4fc5a9a46..8b99576a27f94 100644 --- a/docs/docs/examples/workflow/multi_strategy_workflow.ipynb +++ b/docs/docs/examples/workflow/multi_strategy_workflow.ipynb @@ -218,14 +218,17 @@ " self, ctx: Context, ev: StartEvent | JudgeEvent\n", " ) -> BadQueryEvent | NaiveRAGEvent | HighTopKEvent | RerankEvent:\n", " # initialize\n", - " 
if not hasattr(ctx.data, \"llm\"):\n", - " ctx.data[\"llm\"] = OpenAI(model=\"gpt-4o\", temperature=0.1)\n", - " ctx.data[\"index\"] = self.load_or_create_index(\"data\", \"storage\")\n", + " llm = await ctx.get(\"llm\", default=None)\n", + " if llm is None:\n", + " await ctx.set(\"llm\", OpenAI(model=\"gpt-4o\", temperature=0.1))\n", + " await ctx.set(\n", + " \"index\", self.load_or_create_index(\"data\", \"storage\")\n", + " )\n", "\n", " # we use a chat engine so it remembers previous interactions\n", - " ctx.data[\"judge\"] = SimpleChatEngine.from_defaults()\n", + " await ctx.set(\"judge\", SimpleChatEngine.from_defaults())\n", "\n", - " response = ctx.data[\"judge\"].chat(\n", + " response = (await ctx.get(\"judge\")).chat(\n", " f\"\"\"\n", " Given a user query, determine if this is likely to yield good results from a RAG system as-is. If it's good, return 'good', if it's bad, return 'bad'.\n", " Good queries use a lot of relevant keywords and are detailed. Bad queries are vague or ambiguous.\n", @@ -246,7 +249,7 @@ " async def improve_query(\n", " self, ctx: Context, ev: BadQueryEvent\n", " ) -> JudgeEvent:\n", - " response = ctx.data[\"llm\"].complete(\n", + " response = (await ctx.get(\"llm\")).complete(\n", " f\"\"\"\n", " This is a query to a RAG system: {ev.query}\n", "\n", @@ -259,7 +262,7 @@ " async def naive_rag(\n", " self, ctx: Context, ev: NaiveRAGEvent\n", " ) -> ResponseEvent:\n", - " index = ctx.data[\"index\"]\n", + " index = await ctx.get(\"index\")\n", " engine = index.as_query_engine(similarity_top_k=5)\n", " response = engine.query(ev.query)\n", " print(\"Naive response:\", response)\n", @@ -271,7 +274,7 @@ " async def high_top_k(\n", " self, ctx: Context, ev: HighTopKEvent\n", " ) -> ResponseEvent:\n", - " index = ctx.data[\"index\"]\n", + " index = await ctx.get(\"index\")\n", " engine = index.as_query_engine(similarity_top_k=20)\n", " response = engine.query(ev.query)\n", " print(\"High top k response:\", response)\n", @@ -281,8 +284,8 @@ "\n", " @step\n", " async def rerank(self, ctx: Context, ev: RerankEvent) -> ResponseEvent:\n", - " index = ctx.data[\"index\"]\n", - " reranker = RankGPTRerank(top_n=5, llm=ctx.data[\"llm\"])\n", + " index = await ctx.get(\"index\")\n", + " reranker = RankGPTRerank(top_n=5, llm=await ctx.get(\"llm\"))\n", " retriever = index.as_retriever(similarity_top_k=20)\n", " engine = RetrieverQueryEngine.from_args(\n", " retriever=retriever,\n", @@ -300,7 +303,7 @@ " if ready is None:\n", " return None\n", "\n", - " response = ctx.data[\"judge\"].chat(\n", + " response = (await ctx.get(\"judge\")).chat(\n", " f\"\"\"\n", " A user has provided a query and 3 different strategies have been used\n", " to try to answer the query. 
Your job is to decide which strategy best\n", diff --git a/docs/docs/examples/workflow/parallel_execution.ipynb b/docs/docs/examples/workflow/parallel_execution.ipynb index 03fde6084fc70..68536e65c64ff 100644 --- a/docs/docs/examples/workflow/parallel_execution.ipynb +++ b/docs/docs/examples/workflow/parallel_execution.ipynb @@ -108,7 +108,7 @@ " @step\n", " async def start(self, ctx: Context, ev: StartEvent) -> ProcessEvent:\n", " data_list = [\"A\", \"B\", \"C\"]\n", - " ctx.data[\"num_to_collect\"] = len(data_list)\n", + " await ctx.set(\"num_to_collect\", len(data_list))\n", " for item in data_list:\n", " self.send_event(ProcessEvent(data=item))\n", " return None\n", @@ -125,7 +125,7 @@ " async def combine_results(\n", " self, ctx: Context, ev: ResultEvent\n", " ) -> StopEvent | None:\n", - " num_to_collect = ctx.data[\"num_to_collect\"]\n", + " num_to_collect = await ctx.get(\"num_to_collect\")\n", " results = ctx.collect_events(ev, [ResultEvent] * num_to_collect)\n", " if results is None:\n", " return None\n", @@ -138,7 +138,7 @@ " @step\n", " async def start(self, ctx: Context, ev: StartEvent) -> ProcessEvent:\n", " data_list = [\"A\", \"B\", \"C\"]\n", - " ctx.data[\"num_to_collect\"] = len(data_list)\n", + " await ctx.set(\"num_to_collect\", len(data_list))\n", " for item in data_list:\n", " self.send_event(ProcessEvent(data=item))\n", " return None\n", @@ -155,7 +155,7 @@ " async def combine_results(\n", " self, ctx: Context, ev: ResultEvent\n", " ) -> StopEvent | None:\n", - " num_to_collect = ctx.data[\"num_to_collect\"]\n", + " num_to_collect = await ctx.get(\"num_to_collect\")\n", " results = ctx.collect_events(ev, [ResultEvent] * num_to_collect)\n", " if results is None:\n", " return None\n", diff --git a/docs/docs/examples/workflow/rag.ipynb b/docs/docs/examples/workflow/rag.ipynb index afcece0e5cb86..a1d94d4453027 100644 --- a/docs/docs/examples/workflow/rag.ipynb +++ b/docs/docs/examples/workflow/rag.ipynb @@ -44,7 +44,7 @@ "metadata": {}, "outputs": [], "source": [ - "!pip install \"llama-index-core>=0.10.43\" \"openinference-instrumentation-llama-index>=2.2.2\" \"opentelemetry-proto>=1.12.0\" opentelemetry-exporter-otlp opentelemetry-sdk" + "%pip install \"openinference-instrumentation-llama-index>=3.0.0\" \"opentelemetry-proto>=1.12.0\" opentelemetry-exporter-otlp opentelemetry-sdk" ] }, { @@ -190,11 +190,11 @@ " return None\n", "\n", " documents = SimpleDirectoryReader(dirname).load_data()\n", - " ctx.data[\"index\"] = VectorStoreIndex.from_documents(\n", + " index = VectorStoreIndex.from_documents(\n", " documents=documents,\n", " embed_model=OpenAIEmbedding(model_name=\"text-embedding-3-small\"),\n", " )\n", - " return StopEvent(result=f\"Indexed {len(documents)} documents.\")\n", + " return StopEvent(result=index)\n", "\n", " @step\n", " async def retrieve(\n", @@ -202,22 +202,23 @@ " ) -> RetrieverEvent | None:\n", " \"Entry point for RAG, triggered by a StartEvent with `query`.\"\n", " query = ev.get(\"query\")\n", + " index = ev.get(\"index\")\n", + "\n", " if not query:\n", " return None\n", "\n", " print(f\"Query the database with: {query}\")\n", "\n", " # store the query in the global context\n", - " ctx.data[\"query\"] = query\n", + " await ctx.set(\"query\", query)\n", "\n", " # get the index from the global context\n", - " index = ctx.data.get(\"index\")\n", " if index is None:\n", " print(\"Index is empty, load some documents before querying!\")\n", " return None\n", "\n", " retriever = index.as_retriever(similarity_top_k=2)\n", - " nodes = 
retriever.retrieve(query)\n", + " nodes = await retriever.aretrieve(query)\n", " print(f\"Retrieved {len(nodes)} nodes.\")\n", " return RetrieverEvent(nodes=nodes)\n", "\n", @@ -227,9 +228,9 @@ " ranker = LLMRerank(\n", " choice_batch_size=5, top_n=3, llm=OpenAI(model=\"gpt-4o-mini\")\n", " )\n", - " print(ctx.data.get(\"query\"), flush=True)\n", + " print(await ctx.get(\"query\", default=None), flush=True)\n", " new_nodes = ranker.postprocess_nodes(\n", - " ev.nodes, query_str=ctx.data.get(\"query\")\n", + " ev.nodes, query_str=await ctx.get(\"query\", default=None)\n", " )\n", " print(f\"Reranked nodes to {len(new_nodes)}\")\n", " return RerankEvent(nodes=new_nodes)\n", @@ -239,7 +240,7 @@ " \"\"\"Return a streaming response using reranked nodes.\"\"\"\n", " llm = OpenAI(model=\"gpt-4o-mini\")\n", " summarizer = CompactAndRefine(llm=llm, streaming=True, verbose=True)\n", - " query = ctx.data.get(\"query\")\n", + " query = await ctx.get(\"query\", default=None)\n", "\n", " response = await summarizer.asynthesize(query, nodes=ev.nodes)\n", " return StopEvent(result=response)" @@ -268,23 +269,12 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'Indexed 77 documents.'" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "w = RAGWorkflow()\n", "\n", "# Ingest the documents\n", - "await w.run(dirname=\"data\")" + "index = await w.run(dirname=\"data\")" ] }, { @@ -298,17 +288,19 @@ "text": [ "Query the database with: How was Llama2 trained?\n", "Retrieved 2 nodes.\n", - "Llama 2 was trained through a multi-step process that began with pretraining using publicly available online sources. This was followed by the creation of an initial version of Llama 2-Chat through supervised fine-tuning. The model was then iteratively refined using Reinforcement Learning with Human Feedback (RLHF) methodologies, which included techniques like rejection sampling and Proximal Policy Optimization (PPO). \n", + "How was Llama2 trained?\n", + "Reranked nodes to 2\n", + "Llama 2 was trained through a multi-step process that began with pretraining using publicly available online sources. This was followed by the creation of an initial version of Llama 2-Chat through supervised fine-tuning. The model was then iteratively refined using Reinforcement Learning with Human Feedback (RLHF) methodologies, which included rejection sampling and Proximal Policy Optimization (PPO). \n", "\n", - "During pretraining, the model utilized an optimized auto-regressive transformer architecture, incorporating robust data cleaning, updated data mixes, and training on a significantly larger dataset of 2 trillion tokens. The training process also involved increased context length and the use of grouped-query attention (GQA) to enhance inference scalability. \n", + "During pretraining, the model utilized an optimized auto-regressive transformer architecture, incorporating robust data cleaning, updated data mixes, and training on a significantly larger dataset of 2 trillion tokens. The training process also involved increased context length and the use of grouped-query attention (GQA) to enhance inference scalability.\n", "\n", - "The training employed the AdamW optimizer with specific hyperparameters, a cosine learning rate schedule, and gradient clipping. The models were pretrained on Meta’s Research SuperCluster and internal production clusters, utilizing NVIDIA A100 GPUs for the training process." 
+ "The training employed the AdamW optimizer with specific hyperparameters, including a cosine learning rate schedule and gradient clipping. The models were pretrained on Meta’s Research SuperCluster and internal production clusters, utilizing NVIDIA A100 GPUs." ] } ], "source": [ "# Run a query\n", - "result = await w.run(query=\"How was Llama2 trained?\")\n", + "result = await w.run(query=\"How was Llama2 trained?\", index=index)\n", "async for chunk in result.async_response_gen():\n", " print(chunk, end=\"\", flush=True)" ] diff --git a/docs/docs/examples/workflow/react_agent.ipynb b/docs/docs/examples/workflow/react_agent.ipynb index c633c836ff7fb..49ba102995d72 100644 --- a/docs/docs/examples/workflow/react_agent.ipynb +++ b/docs/docs/examples/workflow/react_agent.ipynb @@ -30,7 +30,7 @@ "source": [ "import os\n", "\n", - "os.environ[\"OPENAI_API_KEY\"] = \"sk-proj-...\"" + "os.environ[\"OPENAI_API_KEY\"] = \"sk-proj--...\"" ] }, { @@ -218,7 +218,7 @@ " self.memory.put(user_msg)\n", "\n", " # clear current reasoning\n", - " ctx.data[\"current_reasoning\"] = []\n", + " await ctx.set(\"current_reasoning\", [])\n", "\n", " return PrepEvent()\n", "\n", @@ -228,7 +228,7 @@ " ) -> InputEvent:\n", " # get chat history\n", " chat_history = self.memory.get()\n", - " current_reasoning = ctx.data.get(\"current_reasoning\", [])\n", + " current_reasoning = await ctx.get(\"current_reasoning\", default=[])\n", " llm_input = self.formatter.format(\n", " self.tools, chat_history, current_reasoning=current_reasoning\n", " )\n", @@ -244,7 +244,9 @@ "\n", " try:\n", " reasoning_step = self.output_parser.parse(response.message.content)\n", - " ctx.data.get(\"current_reasoning\", []).append(reasoning_step)\n", + " (await ctx.get(\"current_reasoning\", default=[])).append(\n", + " reasoning_step\n", + " )\n", " if reasoning_step.is_done:\n", " self.memory.put(\n", " ChatMessage(\n", @@ -255,7 +257,9 @@ " result={\n", " \"response\": reasoning_step.response,\n", " \"sources\": [*self.sources],\n", - " \"reasoning\": ctx.data.get(\"current_reasoning\", []),\n", + " \"reasoning\": await ctx.get(\n", + " \"current_reasoning\", default=[]\n", + " ),\n", " }\n", " )\n", " elif isinstance(reasoning_step, ActionReasoningStep):\n", @@ -271,7 +275,7 @@ " ]\n", " )\n", " except Exception as e:\n", - " ctx.data.get(\"current_reasoning\", []).append(\n", + " (await ctx.get(\"current_reasoning\", default=[])).append(\n", " ObservationReasoningStep(\n", " observation=f\"There was an error in parsing my reasoning: {e}\"\n", " )\n", @@ -291,7 +295,7 @@ " for tool_call in tool_calls:\n", " tool = tools_by_name.get(tool_call.tool_name)\n", " if not tool:\n", - " ctx.data.get(\"current_reasoning\", []).append(\n", + " (await ctx.get(\"current_reasoning\", default=[])).append(\n", " ObservationReasoningStep(\n", " observation=f\"Tool {tool_call.tool_name} does not exist\"\n", " )\n", @@ -301,11 +305,11 @@ " try:\n", " tool_output = tool(**tool_call.tool_kwargs)\n", " self.sources.append(tool_output)\n", - " ctx.data.get(\"current_reasoning\", []).append(\n", + " (await ctx.get(\"current_reasoning\", default=[])).append(\n", " ObservationReasoningStep(observation=tool_output.content)\n", " )\n", " except Exception as e:\n", - " ctx.data.get(\"current_reasoning\", []).append(\n", + " (await ctx.get(\"current_reasoning\", default=[])).append(\n", " ObservationReasoningStep(\n", " observation=f\"Error calling tool {tool.metadata.get_name()}: {e}\"\n", " )\n", @@ -419,7 +423,13 @@ "Step new_user_msg produced event PrepEvent\n", 
"Running step prepare_chat_history\n", "Step prepare_chat_history produced event InputEvent\n", - "Running step handle_llm_input\n", + "Running step handle_llm_input\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ "Step handle_llm_input produced event ToolCallEvent\n", "Running step handle_tool_calls\n", "Step handle_tool_calls produced event PrepEvent\n", diff --git a/docs/docs/examples/workflow/reflection.ipynb b/docs/docs/examples/workflow/reflection.ipynb index 618ba9798a8fd..1dae796bc8559 100644 --- a/docs/docs/examples/workflow/reflection.ipynb +++ b/docs/docs/examples/workflow/reflection.ipynb @@ -165,11 +165,11 @@ " async def extract(\n", " self, ctx: Context, ev: StartEvent | ValidationErrorEvent\n", " ) -> StopEvent | ExtractionDone:\n", - " current_retries = ctx.data.get(\"retries\", 0)\n", + " current_retries = await ctx.get(\"retries\", default=0)\n", " if current_retries >= self.max_retries:\n", " return StopEvent(result=\"Max retries reached\")\n", " else:\n", - " ctx.data[\"retries\"] = current_retries + 1\n", + " await ctx.set(\"retries\", current_retries + 1)\n", "\n", " if isinstance(ev, StartEvent):\n", " passage = ev.get(\"passage\")\n", diff --git a/docs/docs/examples/workflow/router_query_engine.ipynb b/docs/docs/examples/workflow/router_query_engine.ipynb index 90c76569f6639..1727683f54ba3 100644 --- a/docs/docs/examples/workflow/router_query_engine.ipynb +++ b/docs/docs/examples/workflow/router_query_engine.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { @@ -152,10 +152,10 @@ " Selects a single/ multiple query engines based on the query.\n", " \"\"\"\n", "\n", - " ctx.data[\"query\"] = ev.get(\"query\")\n", - " ctx.data[\"llm\"] = ev.get(\"llm\")\n", - " ctx.data[\"query_engine_tools\"] = ev.get(\"query_engine_tools\")\n", - " ctx.data[\"summarizer\"] = ev.get(\"summarizer\")\n", + " await ctx.set(\"query\", ev.get(\"query\"))\n", + " await ctx.set(\"llm\", ev.get(\"llm\"))\n", + " await ctx.set(\"query_engine_tools\", ev.get(\"query_engine_tools\"))\n", + " await ctx.set(\"summarizer\", ev.get(\"summarizer\"))\n", "\n", " llm = Settings.llm\n", " select_multiple_query_engines = ev.get(\"select_multi\")\n", @@ -184,9 +184,9 @@ " ) -> SynthesizeEvent:\n", " \"\"\"Generate the responses from the selected query engines.\"\"\"\n", "\n", - " query = ctx.data.get(\"query\")\n", + " query = await ctx.get(\"query\", default=None)\n", " selected_query_engines = ev.selected_query_engines\n", - " query_engine_tools = ctx.data[\"query_engine_tools\"]\n", + " query_engine_tools = await ctx.get(\"query_engine_tools\")\n", "\n", " query_engines = [engine.query_engine for engine in query_engine_tools]\n", "\n", @@ -265,8 +265,8 @@ " \"\"\"Synthesizes the responses from the generated responses.\"\"\"\n", "\n", " response_generated = ev.result\n", - " query = ctx.data.get(\"query\")\n", - " summarizer = ctx.data[\"summarizer\"]\n", + " query = await ctx.get(\"query\", default=None)\n", + " summarizer = await ctx.get(\"summarizer\")\n", " selected_query_engines = ev.selected_query_engines\n", "\n", " if len(response_generated) > 1:\n", diff --git a/docs/docs/examples/workflow/self_discover_workflow.ipynb b/docs/docs/examples/workflow/self_discover_workflow.ipynb index cd4e3e57b6e8d..c38e05bf897b9 100644 --- a/docs/docs/examples/workflow/self_discover_workflow.ipynb +++ b/docs/docs/examples/workflow/self_discover_workflow.ipynb @@ -230,7 +230,7 @@ " if task is None or llm is None:\n", " raise 
ValueError(\"'task' and 'llm' arguments are required.\")\n", "\n", - " ctx.data[\"llm\"] = llm\n", + " await ctx.set(\"llm\", llm)\n", "\n", " # format prompt and get result from LLM\n", " prompt = SELECT_PRMOPT_TEMPLATE.format(\n", @@ -247,7 +247,7 @@ " \"\"\"Refine modules step.\"\"\"\n", " task = ev.task\n", " modules = ev.modules\n", - " llm: LLM = ctx.data[\"llm\"]\n", + " llm: LLM = await ctx.get(\"llm\")\n", "\n", " # format prompt and get result\n", " prompt = ADAPT_PROMPT_TEMPLATE.format(\n", @@ -264,7 +264,7 @@ " \"\"\"Create reasoning structures step.\"\"\"\n", " task = ev.task\n", " refined_modules = ev.refined_modules\n", - " llm: LLM = ctx.data[\"llm\"]\n", + " llm: LLM = await ctx.get(\"llm\")\n", "\n", " # format prompt, get result\n", " prompt = IMPLEMENT_PROMPT_TEMPLATE.format(\n", @@ -283,7 +283,7 @@ " \"\"\"Gets final result from reasoning structure event.\"\"\"\n", " task = ev.task\n", " reasoning_structure = ev.reasoning_structure\n", - " llm: LLM = ctx.data[\"llm\"]\n", + " llm: LLM = await ctx.get(\"llm\")\n", "\n", " # format prompt, get res\n", " prompt = REASONING_PROMPT_TEMPLATE.format(\n", diff --git a/docs/docs/examples/workflow/sub_question_query_engine.ipynb b/docs/docs/examples/workflow/sub_question_query_engine.ipynb index c40387e3d1ca4..b236e4b003af3 100644 --- a/docs/docs/examples/workflow/sub_question_query_engine.ipynb +++ b/docs/docs/examples/workflow/sub_question_query_engine.ipynb @@ -104,16 +104,16 @@ " @step\n", " async def query(self, ctx: Context, ev: StartEvent) -> QueryEvent:\n", " if hasattr(ev, \"query\"):\n", - " ctx.data[\"original_query\"] = ev.query\n", - " print(f\"Query is {ctx.data['original_query']}\")\n", + " await ctx.set(\"original_query\", ev.query)\n", + " print(f\"Query is {await ctx.get('original_query')}\")\n", "\n", " if hasattr(ev, \"llm\"):\n", - " ctx.data[\"llm\"] = ev.llm\n", + " await ctx.set(\"llm\", ev.llm)\n", "\n", " if hasattr(ev, \"tools\"):\n", - " ctx.data[\"tools\"] = ev.tools\n", + " await ctx.set(\"tools\", ev.tools)\n", "\n", - " response = ctx.data[\"llm\"].complete(\n", + " response = (await ctx.get(\"llm\")).complete(\n", " f\"\"\"\n", " Given a user question, and a list of tools, output a list of\n", " relevant sub-questions, such that the answers to all the\n", @@ -126,9 +126,9 @@ " \"What is the GDP of San Francisco?\"\n", " ]\n", " }}\n", - " Here is the user question: {ctx.data['original_query']}\n", + " Here is the user question: {await ctx.get('original_query')}\n", "\n", - " And here is the list of tools: {ctx.data['tools']}\n", + " And here is the list of tools: {await ctx.get('tools')}\n", " \"\"\"\n", " )\n", "\n", @@ -137,7 +137,7 @@ " response_obj = json.loads(str(response))\n", " sub_questions = response_obj[\"sub_questions\"]\n", "\n", - " ctx.data[\"sub_question_count\"] = len(sub_questions)\n", + " await ctx.set(\"sub_question_count\", len(sub_questions))\n", "\n", " for question in sub_questions:\n", " self.send_event(QueryEvent(question=question))\n", @@ -149,7 +149,7 @@ " print(f\"Sub-question is {ev.question}\")\n", "\n", " agent = ReActAgent.from_tools(\n", - " ctx.data[\"tools\"], llm=ctx.data[\"llm\"], verbose=True\n", + " await ctx.get(\"tools\"), llm=await ctx.get(\"llm\"), verbose=True\n", " )\n", " response = agent.chat(ev.question)\n", "\n", @@ -160,7 +160,7 @@ " self, ctx: Context, ev: AnswerEvent\n", " ) -> StopEvent | None:\n", " ready = ctx.collect_events(\n", - " ev, [AnswerEvent] * ctx.data[\"sub_question_count\"]\n", + " ev, [AnswerEvent] * await 
ctx.get(\"sub_question_count\")\n", " )\n", " if ready is None:\n", " return None\n", @@ -177,7 +177,7 @@ " each of which has been answered. Combine the answers to all the sub-questions\n", " into a single answer to the original question.\n", "\n", - " Original question: {ctx.data['original_query']}\n", + " Original question: {await ctx.get('original_query')}\n", "\n", " Sub-questions and answers:\n", " {answers}\n", @@ -185,7 +185,7 @@ "\n", " print(f\"Final prompt is {prompt}\")\n", "\n", - " response = ctx.data[\"llm\"].complete(prompt)\n", + " response = (await ctx.get(\"llm\")).complete(prompt)\n", "\n", " print(\"Final response is\", response)\n", "\n", diff --git a/docs/docs/examples/workflow/workflows_cookbook.ipynb b/docs/docs/examples/workflow/workflows_cookbook.ipynb index 4a5a35be9c089..a9af8212f6dee 100644 --- a/docs/docs/examples/workflow/workflows_cookbook.ipynb +++ b/docs/docs/examples/workflow/workflows_cookbook.ipynb @@ -59,9 +59,8 @@ "outputs": [], "source": [ "import os\n", - "from google.colab import userdata\n", "\n", - "os.environ[\"OPENAI_API_KEY\"] = userdata.get(\"openai-key\")" + "os.environ[\"OPENAI_API_KEY\"] = \"sk-proj-...\"" ] }, { @@ -82,7 +81,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "LlamaIndex, previously known as GPT Index, is a data framework designed to facilitate the connection between large language models (LLMs) and external data sources. It provides a suite of tools that enable LLMs to ingest, structure, and query data from various sources such as documents, databases, and APIs. This framework is particularly useful for applications that require LLMs to interact with and retrieve information from large and diverse datasets, enhancing their ability to generate accurate and contextually relevant responses.\n" + "LlamaIndex, formerly known as GPT Index, is a data framework designed to facilitate the connection between large language models (LLMs) and external data sources. It provides tools to index various data types, such as documents, databases, and APIs, enabling LLMs to interact with and retrieve information from these sources more effectively. The framework supports the creation of indices that can be queried by LLMs, enhancing their ability to access and utilize external data in a structured manner. 
diff --git a/docs/docs/examples/workflow/workflows_cookbook.ipynb b/docs/docs/examples/workflow/workflows_cookbook.ipynb index 4a5a35be9c089..a9af8212f6dee 100644 --- a/docs/docs/examples/workflow/workflows_cookbook.ipynb +++ b/docs/docs/examples/workflow/workflows_cookbook.ipynb @@ -59,9 +59,8 @@ "outputs": [], "source": [ "import os\n", - "from google.colab import userdata\n", "\n", - "os.environ[\"OPENAI_API_KEY\"] = userdata.get(\"openai-key\")" + "os.environ[\"OPENAI_API_KEY\"] = \"sk-proj-...\"" ] }, { @@ -82,7 +81,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "LlamaIndex, previously known as GPT Index, is a data framework designed to facilitate the connection between large language models (LLMs) and external data sources. It provides a suite of tools that enable LLMs to ingest, structure, and query data from various sources such as documents, databases, and APIs. This framework is particularly useful for applications that require LLMs to interact with and retrieve information from large and diverse datasets, enhancing their ability to generate accurate and contextually relevant responses.\n" + "LlamaIndex, formerly known as GPT Index, is a data framework designed to facilitate the connection between large language models (LLMs) and external data sources. It provides tools to index various data types, such as documents, databases, and APIs, enabling LLMs to interact with and retrieve information from these sources more effectively. The framework supports the creation of indices that can be queried by LLMs, enhancing their ability to access and utilize external data in a structured manner. This capability is particularly useful for applications requiring the integration of LLMs with specific datasets or knowledge bases.\n" ] } ], @@ -114,15 +113,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "trivial_workflow.html\n" - ] - } - ], + "outputs": [], "source": [ "draw_all_possible_flows(OpenAIGenerator, filename=\"trivial_workflow.html\")" ] @@ -247,14 +238,8 @@ "Running step answer_query\n", "Step answer_query produced event FailedEvent\n", "Running step improve_query\n", - "Step improve_query produced event QueryEvent\n", - "Running step answer_query\n", - "Step answer_query produced event FailedEvent\n", - "Running step improve_query\n", - "Step improve_query produced event QueryEvent\n", - "Running step answer_query\n", - "Step answer_query produced event StopEvent\n", - "The answer to your query\n" + "Step improve_query produced event StopEvent\n", + "Your query can't be fixed.\n" ] } ], @@ -283,14 +268,14 @@ " @step\n", " async def setup(self, ctx: Context, ev: StartEvent) -> QueryEvent:\n", " # load our data here\n", - " ctx.data[\"some_database\"] = [\"value1\", \"value2\", \"value3\"]\n", + " await ctx.set(\"some_database\", [\"value1\", \"value2\", \"value3\"])\n", "\n", " return QueryEvent(query=ev.query)\n", "\n", " @step\n", " async def query(self, ctx: Context, ev: QueryEvent) -> StopEvent:\n", " # use our data with our query\n", - " data = ctx.data[\"some_database\"]\n", + " data = await ctx.get(\"some_database\")\n", "\n", " result = f\"The answer to your query is {data[1]}\"\n", " return StopEvent(result=result)" @@ -336,7 +321,7 @@ " @step\n", " async def setup(self, ctx: Context, ev: StartEvent) -> StopEvent:\n", " if hasattr(ev, \"data\"):\n", - " ctx.data[\"some_database\"] = ev.data\n", + " await ctx.set(\"data\", ev.data)\n", "\n", " return StopEvent(result=None)\n", "\n", @@ -344,8 +329,8 @@ " async def query(self, ctx: Context, ev: StartEvent) -> StopEvent:\n", " if hasattr(ev, \"query\"):\n", " # do we have any data?\n", - " if \"some_database\" in ctx.data:\n", - " data = ctx.data[\"some_database\"]\n", + " if hasattr(self, \"data\"):\n", + " data = await ctx.get(\"data\")\n", " return StopEvent(result=f\"Got the data {data}\")\n", " else:\n", " # there's no data yet\n", @@ -457,10 +442,10 @@ " @step\n", " async def setup(self, ctx: Context, ev: StartEvent) -> SetupEvent:\n", " # generically start everything up\n", - " if \"setup\" not in ctx.data:\n", - " ctx.data[\"setup\"] = True\n", + " if not hasattr(self, \"setup\") or not self.setup:\n", + " self.setup = True\n", " print(\"I got set up\")\n", - " return SetupEvent(error=False)\n", + " return SetupEvent(error=False)\n", "\n", " @step\n", " async def collect_input(self, ev: StartEvent) -> InputEvent:\n", @@ -479,18 +464,18 @@ " @step\n", " async def run_query(\n", " self, ctx: Context, ev: InputEvent | SetupEvent | QueryEvent\n", - " ) -> StopEvent:\n", + " ) -> StopEvent | None:\n", " ready = ctx.collect_events(ev, [QueryEvent, InputEvent, SetupEvent])\n", " if ready is None:\n", " print(\"Not enough events yet\")\n", - " return StopEvent()\n", + " return None\n", "\n", " # run the query\n", " print(\"Now I have all the events\")\n", " print(ready)\n", - " return StopEvent(\n", - " result=f\"Ran query {ready[0].query} on input {ready[1].input}\"\n", - " )" + "\n", + " result = f\"Ran query '{ready[0].query}' on input '{ready[1].input}'\"\n", + " return StopEvent(result=result)" ] }, { @@ -502,26
+487,19 @@ "name": "stdout", "output_type": "stream", "text": [ - "I got set up\n", - "Not enough events yet\n", - "---\n", "I got some input\n", - "Not enough events yet\n", - "---\n", "I got a query\n", + "Not enough events yet\n", + "Not enough events yet\n", "Now I have all the events\n", "[QueryEvent(query=\"Here's my question\"), InputEvent(input=\"Here's some input\"), SetupEvent(error=False)]\n", - "Ran query Here's my question on input Here's some input\n" + "Ran query 'Here's my question' on input 'Here's some input'\n" ] } ], "source": [ "c = CollectExampleFlow()\n", - "result = await c.run()\n", - "print(\"---\")\n", - "result = await c.run(input=\"Here's some input\")\n", - "print(\"---\")\n", - "result = await c.run(query=\"Here's my question\")\n", + "result = await c.run(input=\"Here's some input\", query=\"Here's my question\")\n", "print(result)" ] }, @@ -556,211 +534,6 @@ "![Screenshot 2024-08-05 at 2.27.46 PM.png](data:image/png;base64,<base64-encoded PNG screenshot omitted>)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can also collect multiple events of the same kind, for instance to monitor failures:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "class FailureEvent(Event):\n", - " error: str\n", - "\n", - "\n", - "class FailureExampleFlow(Workflow):\n", - " @step\n", - " async def try_to_do_thing(\n", - " self, ev: StartEvent\n", - " ) -> StopEvent | FailureEvent:\n", - " random_number = random.randint(0, 10)\n", - " if random_number < 5:\n", - " print(\"There was a failure\")\n", - " return FailureEvent(\n", - " error=f\"This is the bad 
place: {random_number}\"\n", - " )\n", - " else:\n", - " print(\"There was success\")\n", - " return StopEvent(result=\"Success\")\n", - "\n", - " @step\n", - " async def monitor_failures(\n", - " self, ctx: Context, ev: FailureEvent\n", - " ) -> StopEvent | None:\n", - " failed = ctx.collect_events(\n", - " ev, [FailureEvent, FailureEvent, FailureEvent]\n", - " )\n", - " if failed is not None:\n", - " print(failed)\n", - " print(\"The simulation has failed!\")\n", - " return StopEvent(result=\"Failure\")\n", - "\n", - " return StopEvent()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "There was a failure\n", - "==== None\n", - "There was success\n", - "==== Success\n", - "There was success\n", - "==== Success\n", - "There was success\n", - "==== Success\n", - "There was a failure\n", - "==== None\n", - "There was a failure\n", - "[FailureEvent(error='This is the bad place: 2'), FailureEvent(error='This is the bad place: 3'), FailureEvent(error='This is the bad place: 0')]\n", - "The simulation has failed!\n" - ] - } - ], - "source": [ - "f = FailureExampleFlow(verbose=False)\n", - "result = await f.run()\n", - "print(\"====\", result)\n", - "result = await f.run()\n", - "print(\"====\", result)\n", - "result = await f.run()\n", - "print(\"====\", result)\n", - "result = await f.run()\n", - "print(\"====\", result)\n", - "result = await f.run()\n", - "print(\"====\", result)\n", - "result = await f.run()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As you can see, it didn't trigger system failure until the third failure event. The visualization here is again pretty simple." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "failure_workflow.html\n" - ] - } - ], - "source": [ - "draw_all_possible_flows(FailureExampleFlow, \"failure_workflow.html\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![Screenshot 2024-08-05 at 3.40.01 
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Stepping through workflows\n",
-    "\n",
-    "You can also walk through a workflow step-by-step:"
-   ]
-  },
{}, - "outputs": [], - "source": [ - "# Kick off the workflow\n", - "f3 = FailureExampleFlow(verbose=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Running step try_to_do_thing\n", - "There was a failure\n", - "Step try_to_do_thing produced event FailureEvent\n" - ] - } - ], - "source": [ - "await f3.run_step()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Running step monitor_failures\n", - "Step monitor_failures produced event StopEvent\n" - ] - } - ], - "source": [ - "await f3.run_step()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can also use visualization to show just the most recent execution rather than all possible flows, even when the flow is mid-execution:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "last_execution.html\n" - ] - } - ], - "source": [ - "draw_most_recent_execution(f3, \"last_execution.html\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![Screenshot 2024-08-06 at 1.21.16 PM.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAArIAAAKECAYAAAD7WtkwAAAKqmlDQ1BJQ0MgUHJvZmlsZQAASImVlwdUU+kSgP970xstAQEpoTfpLYCU0AMovYpKSAIJJcRAULEriyugKCIioCzoIkXBVal2LFhYBBtYF2RRUNfFgg2Vd4FD2N133nvnzT1z5ruT+Wfmv+f/z5kAQKGxRaIUWA6AVGGGOMTHnR4VHUPHjQAIyAMSgAGFzUkXMYOCAgAis/bv8uEeEo3IbdOpXP/++38VeS4vnQMAFIRwPDedk4rwSUTHOCJxBgCoQ4hfZ2WGaIqvIkwTIw0i/GiKE2d4bIrjpxmNno4JC/FAWBkAPJnNFicCQNZF/PRMTiKSh+yJsIWQKxAijLwDl9TUNC7CSF1giMSIEJ7Kz4j/S57Ev+WMl+ZksxOlPLOXacF7CtJFKezV/+fn+N+SmiKZraGPKJkv9g1BLNIX1J+c5i9lYfziwFkWcKfjp5kv8Q2fZU66R8wsc9me/tK1KYsDZjlB4M2S5slghc0yL90rdJbFaSHSWgliD+Yss8VzdSXJ4VI/n8eS5s/ih0XOcqYgYvEspyeH+s/FeEj9YkmItH+e0Md9rq63dO+p6X/Zr4AlXZvBD/OV7p091z9PyJzLmR4l7Y3L8/SaiwmXxosy3KW1RClB0nheio/Un54ZKl2bgRzIubVB0m+YxPYLmmXgCbxAAPLQQRCwBlbTTzDwyuCtmjqjwCNNtFosSORn0JnILePRWUKO2QK6lYWVDQBTd3bmSLzrn76LkBJ+zpfGB8AhAnEK5nxxJwBofgOAjP2cT/8UALJtAFzczpGIM2d8U9cJYAARyAIaUAEaQAcYAlOkMzvgBNyQjv1AIAgD0WAZ4AA+SAVisBKsBZtADsgDO8EeUAoqwEFQA46C46AFnAYXwBVwA/SAu+AhGADD4CUYAx/ABARBOIgCUSEVSBPSg0wgK4gBuUBeUAAUAkVDcVAiJIQk0FpoC5QHFUKlUCVUC/0CtUEXoGtQL3QfGoRGobfQFxgFk2EarA7rw+YwA2bC/nAYvBROhFfAWXA2vAMugavgI3AzfAG+Ad+FB+CX8DgKoEgoJZQWyhTFQHmgAlExqASUGLUelYsqRlWhGlDtqE7UbdQA6hXqMxqLpqLpaFO0E9oXHY7moFeg16Pz0aXoGnQz+hL6NnoQPYb+jqFg1DAmGEcMCxOFScSsxORgijHVmCbMZcxdzDDmAxaLVcIaYO2xvthobBJ2DTYfux/biD2P7cUOYcdxOJwKzgTnjAvEsXEZuBzcPtwR3DncLdww7hOehNfEW+G98TF4IX4zvhhfhz+Lv4V/jp8gyBH0CI6EQAKXsJpQQDhEaCfcJAwTJojyRAOiMzGMmETcRCwhNhAvEx8R35FIJG2SAymYJCBtJJWQjpGukgZJn8kKZGOyBzmWLCHvIB8mnyffJ7+jUCj6FDdKDCWDsoNSS7lIeUL5JEOVMZNhyXBlNsiUyTTL3JJ5LUuQ1ZNlyi6TzZItlj0he1P2lRxBTl/OQ44tt16uTK5Nrk9uXJ4qbykfKJ8qny9fJ39NfkQBp6Cv4KXAVchWOKhwUWGIiqLqUD2oHOoW6iHqZeowDUszoLFoSbQ82lFaN21MUUHRRjFCcZVimeIZxQEllJK+EkspRalA6bjSPaUv89TnMefx5m2b1zDv1ryPyvOV3ZR5yrnKjcp3lb+o0FW8VJJVdqm0qDxWRasaqwarrlQ9oHpZ9dV82nyn+Zz5ufOPz3+gBqsZq4WorVE7qNalNq6uoe6jLlLfp35R/ZWGkoabRpJGkcZZjVFNqqaLpkCzSPOc5gu6Ip1JT6GX0C/Rx7TUtHy1JFqVWt1aE9oG2uHam7UbtR/rEHUYOgk6RTodOmO6mrqLdNfq1us+0CPoMfT4env1OvU+6hvoR+pv1W/RHzFQNmAZZBnUGzwypBi6Gq4wrDK8Y4Q1YhglG+036jGGjW2N+cZlxjdNYBM7E4HJfpPeBZgFDguEC6oW9JmSTZmmmab1poNmSmYBZpvNWsxem+uax5jvMu80/25ha5FiccjioaWCpZ/lZst2y7dWxlYcqzKrO9YUa2/rDdat1m9sTGx4Ngds+m2ptotst9p22H6zs7cT2zXYjdrr2sfZl9v3MWiMIEY+46oDxsHdYYPDaYfPjnaOGY7HHf90
MnVKdqpzGllosJC38NDCIWdtZ7ZzpfOAC90lzuUnlwFXLVe2a5XrUzcdN65btdtzphEziXmE+drdwl3s3uT+0cPRY53HeU+Up49nrme3l4JXuFep1xNvbe9E73rvMR9bnzU+530xvv6+u3z7WOosDquWNeZn77fO75I/2T/Uv9T/aYBxgDigfRG8yG/R7kWPFustFi5uCQSBrMDdgY+DDIJWBJ0KxgYHBZcFPwuxDFkb0hlKDV0eWhf6Icw9rCDsYbhhuCS8I0I2IjaiNuJjpGdkYeRAlHnUuqgb0arRgujWGFxMREx1zPgSryV7lgzH2sbmxN5barB01dJry1SXpSw7s1x2OXv5iThMXGRcXdxXdiC7ij0ez4ovjx/jeHD2cl5y3bhF3FGeM6+Q9zzBOaEwYSTROXF34ijflV/MfyXwEJQK3iT5JlUkfUwOTD6cPJkSmdKYik+NS20TKgiThZfSNNJWpfWKTEQ5ooEVjiv2rBgT+4ur06H0pemtGTRkOOqSGEp+kAxmumSWZX5aGbHyxCr5VcJVXauNV29b/TzLO+vnNeg1nDUda7XWblo7uI65rnI9tD5+fccGnQ3ZG4Y3+mys2UTclLzp180Wmws3v98SuaU9Wz17Y/bQDz4/1OfI5Ihz+rY6ba34Ef2j4Mfubdbb9m37nsvNvZ5nkVec9zWfk399u+X2ku2TOxJ2dBfYFRzYid0p3Hlvl+uumkL5wqzCod2LdjcX0Ytyi97vWb7nWrFNccVe4l7J3oGSgJLWfbr7du77WsovvVvmXtZYrla+rfzjfu7+WwfcDjRUqFfkVXz5SfBTf6VPZXOVflXxQezBzIPPDkUc6vyZ8XNttWp1XvW3w8LDAzUhNZdq7Wtr69TqCurhekn96JHYIz1HPY+2Npg2VDYqNeYdA8ckx178EvfLveP+xztOME40nNQ7Wd5EbcpthppXN4+18FsGWqNbe9v82jrandqbTpmdOnxa63TZGcUzBWeJZ7PPTp7LOjd+XnT+1YXEC0MdyzseXoy6eOdS8KXuy/6Xr17xvnKxk9l57qrz1dPXHK+1XWdcb7lhd6O5y7ar6VfbX5u67bqbb9rfbO1x6GnvXdh79pbrrQu3PW9fucO6c+Pu4ru998Lv9ffF9g30c/tH7qfcf/Mg88HEw42PMI9yH8s9Ln6i9qTqN6PfGgfsBs4Meg52PQ19+nCIM/Ty9/Tfvw5nP6M8K36u+bx2xGrk9Kj3aM+LJS+GX4peTrzK+UP+j/LXhq9P/un2Z9dY1NjwG/Gbybf571TeHX5v875jPGj8yYfUDxMfcz+pfKr5zPjc+SXyy/OJlV9xX0u+GX1r/+7//dFk6uSkiC1mT48CKEThhAQA3h4GgBINALUHAOKSmZl6WqCZ/wHTBP4Tz8zd02IHQLUbABEbAQhEtGpqBkHeZc4DEITYMDcAW1tLdXb+nZ7Vp0TuCADuXkwrZsATHeFG8A+ZmeP/0vc/LZBm/Zv9F1ZnBWnHgaHhAAAAimVYSWZNTQAqAAAACAAEARoABQAAAAEAAAA+ARsABQAAAAEAAABGASgAAwAAAAEAAgAAh2kABAAAAAEAAABOAAAAAAAAAJAAAAABAAAAkAAAAAEAA5KGAAcAAAASAAAAeKACAAQAAAABAAACsqADAAQAAAABAAAChAAAAABBU0NJSQAAAFNjcmVlbnNob3R4D+WyAAAACXBIWXMAABYlAAAWJQFJUiTwAAAB1mlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj42NDQ8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8ZXhpZjpQaXhlbFhEaW1lbnNpb24+NjkwPC9leGlmOlBpeGVsWERpbWVuc2lvbj4KICAgICAgICAgPGV4aWY6VXNlckNvbW1lbnQ+U2NyZWVuc2hvdDwvZXhpZjpVc2VyQ29tbWVudD4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cq7hi8gAAAAcaURPVAAAAAIAAAAAAAABQgAAACgAAAFCAAABQgAASPrOJLL7AABAAElEQVR4AezdB9zV1P3H8R9TEJD9AC6GLAURHLgF3K0L997a1tr+raPFUfeo1lo6rNq60OIexb0RRx2ggigbFFHZU6YC8s8vT09IcnP3yvikL7nZOXmf+3r1+5x7clJngzUJEwIIIIAAAggggAACEROoQ5CNWI1RXAQQQAABBBBAAAFbgCDLFwEBBBBAAAEEEEAgkgIE2UhWG4VGAAEEEEAAAQQQIMjyHUAAAQQQQAABBBCIpABBNpLVRqERQAABBBBAAAEECLJ8BxBAAAEEEEAAAQQiKUCQjWS1UWgEEEAAAQQQQAABgizfAQQQQAABBBBAAIFIChBkI1ltFBoBBBBAAAEEEECAIMt3AAEEEEAAAQQQQCCSAgTZSFYbhUYAAQQQQAABBBAgyPIdQAABBBBAAAEEEIikAEE2ktVGoRFAAAEEEEAAAQQIsnwHEEAAAQQQQAABBCIpQJCNZLVRaAQQQAABBBBAAAGCLN8BBBBAAAEEEEAAgUgKEGQjWW0UGgEEEEAAAQQQQIAgy3cAAQQQQAABBBBAIJICBNlIVhuFRgABBBBAAAEEECDI8h1AAAEEEEAAAQQQiKQAQTaS1UahEUAAAQQQQAABBAiyfAcQQAABBBBAAAEEIilAkI1ktVFoBBBAAAEEEEAAAYIs3wEEEEAAAQQQQACBSAoQZCNZbRQaAQQQQAABBBBAgCDLdwABBBBAAAEEEEAgkgIE2UhWG4VGAAEEEEAAAQQQIMjyHUAAAQQQQAABBBCIpABBNpLVRqERQAABBBBAAAEECLJ8BxBAAAEEEEAAAQQiKUCQjWS1UWgEEEAAAQQQQAABgizfAQQQQAABBBBAAIFIChBkI1ltFBoBBBBAAAEEEECAIMt3AAEEEEAAAQQQQCCSAgTZSFYbhUYAAQQQQAABBBAgyPIdQAABBBBAAAEEEIikAEE2ktVGoRFAAAEEEEAAAQQIsnwHEEAAAQQQQAABBCIpQJCNZLVRaAQQQAABBBBAAAGCLN8BBBBAAAEEEEAAgUgKEGQjWW0UGgEEEEAAAQQQQIAgy3cAAQQQQAABBBBAIJICBNlIVhuFRgABBBBAAAEEECDIJuQ7MHnp1Ix3OnlJ5u3m4J4
tu5vZlM+eLdJvS9mZFQgggAACCCCAQJECBNkiAat1uDuYmhA6Zdk0T3HMes/KCi6Y0NujeTfPVc16gq+HhQUEEEAAAQQQyFOAIJsnWCV3N2FVA6k7pBYSUJs2bZqx6Nm2m4NXrFhhZlM+M21L2dm1QoOtO+zqMiHXBcQsAggggAACCAQKEGQDWSq70gTWZ2a+YF84l6DqDp5mvmnTJp6Cm/WelRVacIfaFStWeq5qtplPz0bfgjvkEnB9OCwigAACCCCQcAGCbIW/APmEVhNE9dMdUs36Che9bJfTQOsOu7XL6Vt+tSBHdDrELg/htmzVwokRQAABBBAIvQBBtsxVpMHVdA1I19JqgqkJrGa5zEWLxOndITdbwDXhdnDnQyNxbxQSAQQQQAABBIoTIMgW55dytLvFNSi4mpDavn07+1iznHIiVqQVyCXcmi4JtNimZWQDAggggAACkRcgyJagCjW8av/WbMGV0FoC7DSnMOE2XastrbVp4FiNAAIIIIBAhAUIsgVWXrrwasKqtria+QIvwWFFCMydO88+OijYEmqLgOVQBBBAAAEEQiRAkM2jMkx41UPcra8aWOkqkAdkFXY1wXbu3LmeqxNqPRwsIIAAAgggECkBgmwO1WUCrD+86qG0vOYAGLJdNNSma6nlQbGQVRbFQQABBBBAIIMAQTYDzogvn7dfROAPsITXDGgR2xQUarWVlkAbsYqkuAgggAACiRQgyAZUuwZY83IC3Wy6DtDnNQArJquCuh4QaGNSudwGAggggEBsBQiyrqolwLowEjyrodbdl5ZAm+AvA7eOAAIIIBBqAYKsVT3+PrC0wIb6O1uxwhFoK0bNhRBAAAEEEChIIPFB9pZxQ50RCAiwBX2HYn+QO9DqCxa0hbZni+6xv29uEAEEEEAAgbALJDbI+rsRtG/f3hlCK6jS1v+wXpbNWipLZy6WJV8sltWLV0nzrVpIi86tpEWnltK0fTOpU7dO0KGRWLdiznJZNG1hQWVtu12NbNqmSUHHRuUgd5jVMtPdICo1RzkRQAABBOIskMgg6w6xubTCzvn4W3n7ujdk7eq1ab8Lm23ZXAZed4DoZ7pp/udzZcqIibL37/dNt0tZ169bvU4+e2SstOlZI1vt0dFzranPTZLRf3/Psy7XBb2fjvt0znX30O63Yt5yGXv3GNnpF7umDebuQKuts0P6Xhja+6FgCCCAAAIIxF0gcUHW3ZWga9dtsr59a9x9H8nnj36a0/egQeMGdkjdfJctU/Yfc8f7doht1KKRHPP4ySnby71CW5HfuPQlWbN0jex+yd6yzYHen8aTHmRnvjlD3v3DKLsajhh2rDTbfLO0VaJj0Gqg1U+dhvS7kK4GabXYgAACCCCAQPkEEhVk8w2x88bPkdcuedHR3+agbtLj8F52yKm/aX35ftkaWTxtkXz28FhZMGG+s99P7xgsrbq2dpZ15tmznpTvvlkm1QqyX739pbxzw0i7TNmCbMcBnaXL/t085c+00Kpba2ncatNMu4R+25h/WH9oPDPRLme2IGtuZvr0GYRZg8EnAggggAACVRBITJA1IVa7EmhLbC7Tuze/KTNHfmHv2ufUftLn1B0DD9uwfoO8cdnLMnfcbHt7l/27yh6/G+DZN0pBdofTdpTtT+nnKX/cFwoJsmri7mpAy2zcvyXcHwIIIIBA2AQSEWRNn9h8QqxW1HPnPiXLvlpq19lRD50gm7ZN/0DT0i+XyPM/f9reVx98OurhE+x580++QVbD8coF1k/X1vNjjVo0lvqb1Denyvq54ccNsmrhSmlS09TZN58W2dAE2Q1iGzTYtKE0bNrQuZd8Zn5c92OtRdumUqde+ofxCg2yWhYTZukzm0/NsC8CCCCAAALFC8Q+yJoQq1R9++6Ql9irFz0v8z+fZx+T9YEmK3SN/P0rUq9BPdlks01k1wv3kjp16sinwz6WWf+d6QRiPVnzji3sc+70s13F3Z928fRFMvHJ8bJkxmLP/rqzdlXofWJf2XrvTvax5p+1K9fKy7951l7UFuM1S1fLZ8PH2n1hNVBvufvWdkvxD8u/t9fpjtq9YZPmjeyRFgZdf6B9rLuPbKFB9qM7PpA5Y7+1z7fn7waKdjkImlbMXS5vXvmqvand9h2k///t4dntq7e+kKnPT5bFUxc6D9jpvXTYcXPpe+bO0rh1ajcGfYhu6guTpH6j+vKTvx8h01+eIl++MUPmfTrHOXebbdvahlvutrWz7uv3vpJx93/k8W7aoZnUa1hPOg3qItuflFvLtOlmwGgGDi0zCCCAAAIIlF0g9kH2zDfPsxGzDa8VJD3BeshrrPWwl046GsFelw9K6fsadJx73Xt/fEu+eH26e5Uzv9flA6XTwNpuDhqmPn8k+0NlPY7YTnY5f3fnHGusfrpPHvuQvVzTu50TvM0OGqq//+57s+j51MA2+IHj7HWlCLIzXpkq79/2jn2+nkf1kp1/sZvnemZBH57Th+h06v/rPaT7Ydva82tXrbVGTvivHUDtFQH/6AN1e142UNxhVHcbe+8YmfDYePuI3ifskPEBvb5n7Sy6j07uMtsrXP90P3TblJDt2uyZ1Qe/NMzqRBcDDw0LCCCAAAIIlE0g1kHWtMbm26XAaC/9yuoucG5tdwGzrm2vGtli162lQ7/NrVDbJuPP1XrMgonW0+3WGK2j//ae07q455Da/rM1vdtLk3ZN7XFpX/jFf8wlpNfxfaS9dX4NbasWrpLpL02R2R9942x3P4zkDrLODtaMtvpqt4jeJ+4gzbduYT+MNvX5SfYu2xzYrfb8TRo6gbAUQVZbhx878kH7Glr2Y586RerWr+sulojVcj3ijMdtE91w7JOn2C3YOq9Gpox6vPbTbd29jfyw4geZ9e6XnoCrAVyDuJncQdas6/bTnlKzfTu7JXrmqBmyaMrGcXLNdZfP/k4WTpov016Y7PwR0O/sXazhtza1/3hp3aOtOV3WT7oYZCViBwQQQAABBEoqEOsga1pjcxlmK52qBtGRl73ihFD/flvsupXdPWCL/lvZP9X7t5vlTH1k3QEuaEQBPcc7N46Ur9760j7drhfsKd0O6WnP+4OsBu39/vAT+yd27WZQt349u39pPn1kNUQG/XxvX9D3T+f9t/H8/P7+n96WGa9Os/fa98aDPF0ndOWiKQvkpV/XdoXQ0RH2vqJ2TF3tVvHiL0fYx+n1D737KE8fX92gowpoX1adttqzowy4en97Xv/xB9kDbv2ptNuhg7N93Zp1ol1F9Do67X6xNQTZQRuHICumj6xzEWvGdDGgVdatwjwCCCCAAALlEUhEkM23b6yfevm338lHd30g3374tX+TZ1lfCrDjz/qnBDDdKVOQ1fCn47xq2Drs7qMDW3m13+g7N75pX6/fObtIr+P62PP+IHvYPUfbLbD2Rtc/+QRZ12FZZ7VbgHYPMJP2SX3tt7VDlnXebxvZc8hAs8n+1H60k0dMsOfdQdc9QsTeVwySjgO6eI6zF6zWXO0PvHDSAntx8INWq6z1RjWd3EFWW5x3v2Qfe737H23t1T
8adNrhdGtkhpM39n8lyLqlmEcAAQQQQCAaArENsqZbQSF9Y9NVnY4bq0NszR7zjXw7+mvn4Sn3/tqaeMBth6T0pc0UZN3H++fXr10vS62Qq/1szTinfc/cyX5oSff1B9lTXj3bfwp7OZ8gq/ew2dbp31DmvkDHfbrIdsdu76zasGGD/Ofkx+yRAnTl8c+cZneR0HkdQeCJo4fbrdv6wNnRj5zkhHb3CBHHj7CO2bSBHpIyff7wOBlnPUCn0343H2w9ALaFPe8OsulatbXeRl7xir3/9if3tcLsTva8/lOqIGv6yjKCgUPLDAIIIIAAAmUTIMgWSmu1DuoLDjTYznh1qqf/pYa0I4efYD/5bk6fS5DVvqDfjp5lBdcl9rmXfLnY6UtqzqOffc+wguxJfe1V7iCrIxvoyxiCpnyCbKGjFpjrusPmnpcOkM77drU3aYu2Ga1A++7qCAQ66XBhDx18nz2fzz862oE+kKWTO8ju/8efSPu+m6ecyt2twX193ZEgm8LFCgQQQAABBEIvENsga16AUEz/2FxrT4OY/mw95vba/pt6nHtEAl3OGGStUDx++Ccy/t9jddesU7ogq0Nz7XPlfoHHVzLIrpi3XEac+rhdjs133lL2vekge97dz/fwe4+RzbaqbfVdvWiVPHXiI4HlzrRSRx7QEQh0cgfZoDer6T6LrOG8XvrVMzprPwRngrQulyrI6rnGjasdfeL+QXfqIhMCCCCAAAIIlEkgtkG22K4Fk57+XKZZY5lqyNJht/ShrmzTmDus15xa45nqtL3VYrqD1XJqpkxBVsea/cz6ydxM+tN+TZ/20qJzS2nZpZXo6Ab6KtxRV79m75IuyLofnjLnMp+VDLJ6zTcufVnmfFI7puzRj5wo9awXOjx+1L/t4uh4rgf/9XBTNFn3/Tp59LAH7GW9992sB7FymXRINPXRKSxBlq4FudQc+yCAAAIIIFAagdgG2clLp8otY4dKoUNvuceQDXrlbBD/F69Nk/dufdvetO1RvWWnX+zq7JYuyGp3AhPwdGcdkaDrT3pInbret1BNfOIz+eTu0fb53D/9u7sWhCnI6nBX7940yi6v3lPdBnXl/T+9Yy/7RwzQlU+f9KjTr/akF89MHbbLPjL9P2EJsmYILl6MkL6u2IIAAggggECpBAiyaSTnfPytvHHZy87Ww++zfgq3WgAzTe4n790D/esx6YKs+wEkbfU1b9ryX8d97j6n9hN9i5dOBQVZ39BTep5SjCOr5zGTtrI+ddzD9oNd+kCWvh5W71Wn454+NeWVsyMvf8UZK3efq/aTrffqZO/r/+fjuz60x9XVh9H6nLKj01Je0iB7/7HSbIvN/JfOaZkgmxMTOyGAAAIIIFASgdgGWdUpZhxZffr+ubOfsh+6MtLa/9T/iljdpk/jf/rAx86bpXTdUQ+fYA2q30Rn7UlfeKBDbOl08stnOS2u33wwS0ZdVdtlQIPyYfdaw29Zr7Z1T/owmWnN1PX6wgQdtF+nXIOs+zru4bvsk1j/lDrI6nnd/U7Ndboc0FX2+O0As+h8ultw9WE5HYZMX6PrnrR7xYvn1441q+vdxsUG2Y//9aFMevJz+3IH//UwabNtjfvSOc+b/rGMI5szGTsigAACCCBQsECsg6zpJ1to9wINTiOveNkzzJaGzTY929pjtdapV9d6e9YSmffZXM/oAv2sB5B6/e8VqKZmdDD++Z/Psxe1C4AGpQ79trCHmfrPqY+Z3WTbo3tL5/262g9CLZmxyG6l/Gz4xv6zumPXg7vLbhfV9iPNNci6x3fVoKhdHxo228R5sYI7yOobs/SNWrlOun+/s2qDtfsY9ygBZr3/RQVmvb7x69WLNxppGXf+5W6WdY3lv9oe7mzyUxOcF1O4DfQcxQbZz4aPlU8f/MQuTusebexXB2td59I32twDrbFGgk8EEEAAAQQqIxDrIKuEZvSCQseTXbVwpbx17eue4bUyVY1/tAKzr/+BLl1vxjLVn8v14bJMU6eBXWTmqC/sXTQ4Dh52nIjVcJtrkNUxcJ849qGUS5zw7On2W8DcQTZlpywr9IGrQ+46MnAv06VCN2oL9ZHDj3dao/0H6Oti9YE2fbVupknf2LXvDQdZD5DVc3YrNsj6u5Loif0PpTkXC5gxIVY3MVpBABCrEEAAAQQQKINA7IOseehL7Qodikv7e+rDX9qiqi2b/kmDZdvtaqTn4F7Sukdb/2Z7WV9s8L71IJgJo7pS3wS29+/3Fd02+anPZex9H6UcqyFxp5/vKu37bW6PwWreLqavcG3RsaX8sNx6WOzo2tEAOu3bRfa6dFDKOcwKHUXgvzeP8rQwawDVa0x7cbJ8+Jf/ml3z+sw0fu3kpyfYb0XTE7ofUkt3gfU/rBcdh3byfza2vpp9dUSDPtYbufT1vPWtURDck/sPBXNP7u06734N7vanWKNKnLZxVAndPuHx8TL2njE6a096PX2hQy6TeTUtD3nlosU+CCCAAAIIlEYg9kFWmYrtYuCm1jFjV8xZbvedrd+ovrTcpnXKg0vu/f3zOkrB99+tsYNYo5aNPa2T679fLzoG66oFK+3+oTrOqj+w+c+X97L1E/6KuctF6opsslkj561beZ+nAgesXrzK6aPcpF1TadKmqfMmsHJdXv+oUP+6VreRRq0aS70GG1t9013TtMbyNq90QqxHAAEEEECgPAKJCLJKZ7oYFNpftjz8nDXqAibE6n3wgFfUa5PyI4AAAghETSAxQVYrxoRZnS+0m4Eey4SACpjuBDpPiFUFJgQQQAABBCorkKggq7TuMFvoA2CVrSKuFjYBfXuXtsTqp06E2LDVEOVBAAEEEEiKQOKCrFas6TOr84RZVWDKVcDdlYA+sbmqsR8CCCCAAALlEUhkkFVKd5jVZQKtKjClE/C3wjI6QTop1iOAAAIIIFA5gcQGWUPsDrQaZps2bWL919Rs5hMBT19YbYXVENuzRXdkEEAAAQQQQKDKAokPsurvDrO6rIG29rOd/ck/yRPwt8CqAK2wyfsecMcIIIAAAuEWIMi66odA68JI6GxQgKUVNqFfBm4bAQQQQCD0AgTZgCryB1rtaqD/tW9PC20AVyxWEWBjUY3cBAIIIIBAwgQIshkq3B9odVf60WYAi9gmDa86uYfS0mVaYFWBCQEEEEAAgfALEGRzqKOgQKuHMdJBDngh3CWo9VWLSR/YEFYWRUIAAQQQQCCDAEE2A45/0+SlU2XykqnyzMwXPJvoeuDhCOVCuvBK62soq4tCIYAAAgggkJMAQTYnptSdtJVWJ3+o1XWMeqAK1Z1McNVSmC4EpkSEVyPBJwIIIIAAAtEWIMiWoP4yhVrTWquX4WGxEmCnOYWG1RUrVtqh1R9c9RDCaxo4ViOAAAIIIBBhAYJsiSvPdD/Q0wa11rqDLS9fKAzfBFV9SEsns+w+mwbXHs272QGWlxe4ZZhHAAEEEEAgPgIE2TLXpQm2U5ZNs/vXBl2OcBukUrvOhNRMoVX31OCqE2/dshn4BwEEEEAAgUQIEGQrXM0m2OplM4Vb3e4Ou
LXL8Xx9rgmrpmuA3qtZp/P+yYRWWlz9MiwjgAACCCCQLAGCbAjqO59wa4qrIVcn87lxfRMzm7LN2VCBGX8Q1ZCqk3u9ez5dkUxo1ZZWnegmkE6K9QgggAACCCRPgCAb4jp3B1wtZrYW3Ey34g+87n0zbXPvly14ZtvuPpd73oRV08JqthFajQSfCCCAAAIIIBAkQJANUonAOg25Oum4tu5Jw66Z/NvM+mp8Oi2q/+vLqmUgqFajJrgmAggggAAC8REgyManLjPeiQm+QTvlGnhNy2nQOXSdBtN0b0HjrVnp1FiPAAIIIIAAAoUKEGQLleO4tAK3jBua0lKsOw/pdyGtsGnV2IAAAggggAAC+QoQZPMVY/+sAtr6e8vYoSn7aYvukL4XpqxnBQIIIIAAAgggUIgAQbYQNY7JKkAXg6xE7IAAAggggAACRQoQZIsE5PD0AnQxSG/DFgQQQAABBBAoXoAgW7whZ0gjQBeDNDCsRgABBBBAAIGSCBBkS8LISdIJ0MUgnQzrEUAAAQQQQKBYAYJssYIcn1WALgZZidgBAQQQQAABBAoQIMgWgMYh+QnQxSA/L/ZGAAEEEEAAgdwECLK5ObFXkQJ0MSgSkMMRQAABBBBAIEWAIJtCwopyCdDFoFyynBcBBBBAAIFkChBkk1nvVblruhhUhZ2LIoAAAgggEFsBgmxsqzacN0YXg3DWC6VCAAEEEEAgigIE2SjWWsTLTBeDiFcgxUcAAQQQQCAkAgTZkFREkoqRrouBGtw/6M4kUXCvCCCAAAIIIFCEAEG2CDwOLVwgXReDni27y5C+FxZ+Yo5EAAEEEEAAgcQIEGQTU9Xhu9F0XQyO6HSIDO58aPgKTIkQQAABBBBAIFQCBNlQVUfyCnPmm+cF3vSQfhdKzxbdA7exEgEEEEAAAQQQUAGCLN+Dqgpk6i9LmK1q1XBxBBBAAAEEQi9AkA19FcW/gPSXjX8dc4cIIIAAAgiUQ4AgWw5Vzpm3QLr+sjz8lTclByCAAAIIIJAYAYJsYqo6/DeaLszy8Ff4644SIoAAAgggUA0Bgmw11LlmoAD9ZQNZWIkAAggggAACaQQIsmlgWF0dgUxhlpclVKdOuCoCCCCAAAJhFSDIhrVmElwuHv5KcOVz6wgggAACCOQhQJDNA4tdKydAf9nKWXMlBBBAAAEEoipAkI1qzSWg3OlelsDDXwmofG4RAQQQQACBHAQIsjkgsUt1BDL1l+VlCdWpE66KAAIIIIBAmAQIsmGqDcqSIkB/2RQSViCAAAIIIIDA/wQIsnwVQi9AmA19FVFABBBAAAEEqiJAkK0KOxfNV4CHv/IVY38EEEAAAQTiL0CQjX8dx+IO6S8bi2rkJhBAAAEEECipAEG2pJycrJwChNly6nJuBBBAAAEEoidAkI1enSW6xOn6yyoKIxkk+qvBzSOAAAIIJFCAIJvASo/6LafrL9uzZXcZ0vfCqN8e5UcAAQQQQACBHAUIsjlCsVu4BAiz4aoPSoMAAggggEA1BAiy1VDnmiURIMyWhJGTIIAAAgggEFkBgmxkq46CZ3r4i24GfD8QQAABBBCIvwBBNv51HOs7zBRmj+h0iAzufGis75+bQwABBBBAIMkCBNkk135M7p0wG5OK5DYQQAABBBDIU4AgmycYu4dTINOwXLTMhrPOKBUCCCCAAALFChBkixXk+NAIEGZDUxUUBAEEEEAAgYoIEGQrwsxFKiVAmK2UNNdBAAEEEECg+gIE2erXASUosUCmMMvbv0qMzekQQAABBBCoogBBtor4XLp8AoTZ8tlyZgQQQAABBMIiQJANS01QjpILEGZLTsoJEUAAAQQQCJUAQTZU1UFhSi2Q7u1fep37B91Z6stxPgQQQAABBBCooABBtoLYXKo6AunCLG//qk59cFUEEEAAAQRKJUCQLZUk5wm1AGE21NVD4RBAAAEEEChIgCBbEBsHRU0g09u/aJmNWm1SXgQQQAABBGoFCLJ8ExIjQJhNTFVzowgggAACCREgyCakornNWgHCLN8EBBBAAAEE4iNAkI1PXXInOQpkGpaLbgY5IrIbAggggAACIRAgyIagEihC5QUyhVktDW8Aq3ydcEUEEEAAAQTyFSDI5ivG/rERIMzGpiq5EQQQQACBhAoQZBNa8dx2rUCmPrO6By2zfFMQQAABBBAIrwBBNrx1Q8kqJJAtzB7R6RAZ3PnQCpWGyyCAAAIIIIBArgIE2Vyl2C/WAoTZWFcvN4cAAgggEFMBgmxMK5bbKkwg3RvA9Gy0zBZmylEIIIAAAgiUS4AgWy5ZzhtZgXKGWX3AjG4Kkf1qUHAEEEAAgZAJEGRDViEUJxwC5QizpvsCLbvhqGNKgQACCCAQfQGCbPTrkDsok0Cm4bkKCaO3jB0qGmZ1YjSEMlUap0UAAQQQSJQAQTZR1c3N5itQyjDrDrI9W3S3w2y+5WF/BBBAAAEEENgoQJDdaMEcAoECmcJsrq+0Nd0K3BcopFXXfTzzCCCAAAIIJF2AIJv0bwD3n5NAsWHW3RrrviBh1q3BPAIIIIAAAvkJEGTz82LvBAsUE2bTBVnlpL9sgr9U3DoCCCCAQFECBNmi+Dg4aQKZwqxaBIXSoG4Ffrf7B93pX8UyAggggAACCGQRIMhmAWIzAn6BbGHW310glyDrP8Z/TZYRQAABBBBAIFWAIJtqwhoEsgpkC6fuYJqpW4H7Qu5j3OuZRwABBBBAAIFgAYJssAtrEcgqkC3M6ogGGk41yOY6BXVNyPVY9kMAAQQQQCBpAgTZpNU491tygUxvASvkYvSXLUSNYxBAAAEEkihAkE1irXPPJRfI1m82nwvSxSAfLfZFAAEEEEiyAEE2ybXPvZdUgDBbUk5OhgACCCCAQFYBgmxWInZAIHeBbP1mcz+T2P1rB3c+NJ9D2BcBBBBAAIFECRBkE1Xd3GwlBErZMkt/2UrUGNdAAAEEEIiqAEE2qjVHuUMnUMoAa26uZ4vu9ksWzDKfCCCAAAIIILBRgCC70YI5BAoWKEeINYXh4S8jwScCCCCAAAJeAYKs14MlBPIS0D6xz3z5guhnOSfCbDl1OTcCCCCAQFQFCLJRrTnKXXWBcrbCBt0c/WWDVFiHAAIIIJBkAYJskmufey9YoNIhVgtKf9mCq4sDEUAAAQRiKkCQjWnFclvlESjl8FqFlJAuBoWocQwCCCCAQFwFCLJxrVnuq2wCleoXm+4GCLPpZFiPAAIIIJA0AYJs0mqc+y2pQLVC7ZB+F9pdDUp6M5wMAQQQQACBiAkQZCNWYRQ3vAKVDLX0lw3v94CSIYAAAghUToAgWzlrrpQggUqEWroYJOgLxa0igAACCAQKEGQDWViJQGkENNDqVK6xZgmzpaknzoIAAgggEE0Bgmw0641SR1BAQ+3kJVNlytJpJX2BAv1lI/hloMgIIIAAAiURIMiWhJGTIJCfgOl6oEeZVtv8zrBxb/rLbrRgDgEEEEAgWQIE2WTVN3cbQgETaosJtHQxCGHFUiQEEEAAgbILEGTLTswFEMhdoJhQu2f73eScbU/P/WLs
iQACCCCAQMQFCLIRr0CKH1+BQkItYTa+3wfuDAEEEEAgVYAgm2rCGgRCJ5BPqO3ZsrtoVwPtO8uEAAIIIIBAnAUIsnGuXe4tdgKmH+29kx6UhWsWZbw/+s1m5GEjAggggEAMBAiyMahEbiGZAq9+M9Ien3bVulVpAWidTUvDBgQQQACBGAgQZGNQidxCsgVGfPm8PDPzhYwItM5m5GEjAggggEBEBQiyEa04il2YwMwFi+0DZ85fJDMXLinsJCE9avKSKXbJfqj/nazcZJboZ7tle3hK27NlD88yCwgUK9CpTUv7FJ1qWkuntq2KPR3HI4AAAnkJEGTz4mLnqApogB01cbrMnF8bZKN6H5QbgbALDNxuGxnYq1vYi0n5EEAgJgIE2ZhUJLcRLECADXZhLQLlFiDQlluY8yOAgAoQZPkexFZg1IRpVivsjNjeHzeGQNgFCLNhryHKh0D0BQiy0a9D7iBAYNhbo9N2I2jerJm02Kyp6CcTAggUJ7Bs+XL7BF99OyfwRJ1qWskZA/oHbmMlAgggUKwAQbZYQY4PnUC6ltiOW3Sww2vzZk1DV2YKhEAcBGbNniNBgZYwG4fa5R4QCKcAQTac9UKpChQICrHa8lobYgmwBbJyGAJ5CYyfPE1MS6058IyB/RnVwGDwiQACJRMgyJaMkhOFQeCaJ172FENDbJ+ePEHtQWEBgQoI+MMsrbIVQOcSCCRQgCCbwEqP6y0HtcbuvcuOcb1d7guB0Au8M+YTTxlplfVwsIAAAiUQIMiWAJFThEPA3xqr3Qm23rxDOApHKRBIoIC/zyytsgn8EnDLCJRZgCBbZmBOXxkBHS922KjRnovRGuvhYAGBqgjQKlsVdi6KQGIECLKJqep436i/WwGtsfGub+4uOgL+vrJ0L4hO3VFSBKIgQJCNQi1RxqwCBNmsROyAQFUEli1fIeMnT3WuTfcCh4IZBBAogQBBtgSInKL6AgTZ6tdBqUsw6ePRsu6H753Tdu3TTxo3idYQaksWzJdvpk9x7qFZy1bSqWcvZzmXmaULF8jX0yY7uzZt0VI6b9vbWQ77DEE27DVE+RCItgBBNtr1R+n/J+B/k1efnt2tlx/kHnpWrVgubzzxsGzeuYvsNPCAyLtOGfuRjHz6MTnv+lsjey9Xn3aMfL96tVP+82/6s2zVrYezHJaZ9evXyejXXpYF334th599nqdYH735mjx5x1+cdd377iRnXXGds5zLzCdvvSGP3/5nZ9dteu8g5159k7Mc9hmCbNhriPIhEG0Bgmy064/S/0+g0CD74/r1MuaNV+TF4ffZoekIK4jsfvChkXVdOGe2PD/sbpn8Se2Dbzc/8UJk7yUKQXb6+LEy4p47ZeGcb6X3bnvKKRdf7vEmyIr1YgS6Fni+FCwggEBJBQiyJeXkZNUSKDTIPvGPofLxqNedYkc5yGqL4G2/+YVzLzpDkPVwlHTh41GvyRP/2NjaSpAN5iXIBruwFgEESiNAkC2NI2epskChQfaBW66TSR996JQ+ykH22y+my9+HXODci84QZD0cJV1494URduu3OWm5guy8WV/JxI83fkdb1bSTHfYcYC4b+k+CbOiriAIiEGkBgmykq4/CG4FqBNm1338vq1etkM1atjbFqOpnMUH2+9WrZMWypfa9NNhkk4rfh15bp6bNWzjXzrdrwYYNG0T7On+/apW0aNNW6tar55yrHDOlCLKrln8n663uLc2sB7jKNX23ZJE0aLhJ3g/KaZ3UqVNHmmzWvKiiEWSL4uNgBBDIIkCQzQLE5mgI5BtkP3l7pIz6zxMy/5tZnhvcpHFjad66rb3uV3/4szRs1Fge/eutMnvmF85+51x1g7zwwL3y6X/fste16bCF/RCSBkkz1bNC1M+uvTkwPLzyyAMyYfQHZlfp0W8nOeS0c5zlQma0JVaDx7JFCz2H12y5tb3cf78DZa9Dj/Rs0/60H776goy2+gi7H6rS+9naeqhq0NHHS9vNt/QcU8qFld8tkxf/fb9MGTvGLrueu1W79rK91df0gONPkevPPslTrnQPe30xYbyMfv0VGffuKE/xtujSVfTBqEFHHW/VQxPPtmIW5n39lTz055st6wWe8uk5jfeRPzvfHlkgqI/sqb/9vbz++HD7O6B9a3XSAN9zp/7W9+DslO+M9ndWJzN13q63HHnu+WZRXnl4mEwYs7HF9rTfXWkFULEeXnxEZlg25juhtjvsuY/sd8xJUr9BA+d494z+IfCSda1JVguw+eNCvw+9+u9m18lrjw23to1xDjn1ksul7RZbOctBMwTZIBXWIYBAqQQIsqWS5DxVFcg3yL797FNWOLgvY5mvHvaYHSo0JLpD6tbde8qsqRuHQ9KTbLfLbjJxzMZwquuO//XF0m+ffXXWmfQJ9+vOPMETgI49/zdFj5Rw6bGHONcImtnn8KPlp6ee5Wz6aOSr8uSdf3WW080cesa5ssdPDpe6deum26Wg9V9NmST/vvUGJyz5T7Jl1+7WsFUbxx7V7f4gu27tWqsO75X3XnrOf7hnWUOi1kW3HXb0rC904etpU+Qfl1+U8fDTL71atrWCqT/IapjctNlmKfdmTta8dRs574Y/2S3KZl22UQv0Dy13iD/szJ/Jq4/+2/MdM+fST7XV0Szq1a/vXi1fW8OEPXDzdWnrRIf8qmN9D/QPBzP9+pa/iv7BkGkiyGbSYRsCCBQrQJAtVpDjQyFQySAbdMOnDblSPnlrpHz+wX+dzT137C9nXHa1s6wz0z8bJ/dcd4Vn3TUPPC6NNi2uxTCfIDv+vbfl4aG3eMqQaeGgk06XQUcel2mXvLZpl4xbzj8rbWBKdzJ/kB1x9x3ygdWinOv0i+v/mPcYrkHnLibIBp3Pv26vQwfLoaef66zON8g6B2aYOe5XF8mOA/Zz9li39ge59dfnOq23zoYsMwTZLEBsRgCBsgsQZMtOzAUqIZBvkNUWwWmffiJjrJZJ89OrllN/iu68bS+7yAOt8KY/wfpbZM396L76M/Naa9D+K+992A6pw/5wrdlsf151/6OyadNmzrpnrKGa3n/leWdZW2y1tbDY6Y0nH5HF8+ZZIzC85jnV/seeZC937LGt3SKpPxff+utzUlrr9rbCk7bULZ4/z+py8XjK9ov+cpfUZPkJ2XPhDAtBreH6oNQ+hx1lt/x+8NpLoi3G/skdZLXu7r3hSs8u2i1kX6sbQYu2NdYLBKbIuy8849muraEX3naHFNsHWPuc6rixX0z83NM6qeff8X8t8H33HiRtOmye0iJrCqSt41169ZHlS5fIiLv/4fkO6n1c++CTZlfrD6TM48j6W2T1QG2F3v+4k2Srrj3sMr7w4L3O+XRm+933lpMvutRZ987z/7G6y9zjLOuMPlC216FHSB3rfx9a9ztm5Cue7bpAkE0hYQUCCFRYgCBbYXAuVx6BfIOsKUUuoxYEBdmzf3+9HQz1AaMlC+ZJq5r21kM76+QPPz/d09LobvnSMWuvPfN4T0g05zHlKeYzl4e93nnuafGHGh1cX0O5mfRNUkMvOs9Tzm133lV
OH3KV2aWozz/+6mwrdM91zqEvCTjz8mvtB4vMykf+8kenD7JZ5w6y+geDGStXt2tw+78//k02a7XxwTt9KcT9N3lbxLWrxF6HDDanLOqzkIe99IL+kTGCQvnVwx53+vUWEmTPt/p3a4g103+ssPzhqy+aRbv/7s+v29gq768T/TXh9Euv8tSJDjXm/0OJIOuQMoMAAlUSIMhWCZ7LllagkkFW+wq6Q4D7Tl566H55a8TG1rQe/Xa2Q5ruo30L/3XNZc7uGr4u/+eDJXu6Ppcge/tlF3r6Z2oL4nEBLcL6k73+dG8mfyuhWZ/vp4b9K044wnOYPhTXZbvtPeu+mTFNbr/0N551JsiuWbVSrjnd29XhmF/+RnYedIBnf13Qh7I+e/8dZ722Mp74m985y8XMFBpkb3r0WU+dayv5Deec7CnKFf/6t+jrbHXKN8j26r+HnPpbb/eVT60H4R6x+tKaSR/guuRv/7IXc62ToHGKCbJGlE8EEKiWAEG2WvJct6QClQyyg446Tg468fTA8mtXg6EX/dKz7cr7HpEm1gM+z953l+fBpAFHHC0/OWXjA1iegwpYyCXI+oe0Ov7/LpF+1s/g/mnB7G/ktgt+7ll9+b8eLHqoMW2J1dY/96TdMvxDPGk/2itPOcq9m/Ow15yvvpS/XvIrz7Yhd9wvLa0uBf5Jfw5/6s6/OavdAc5ZWeBMIUFWH4zS8OeedMQIrRf3dNldD1ijZ7SxV+UbZAcOPlYOPvkM9+ns1mt3txftBvG722u7GwTVyRV3D08ZEuzHH3+Uy48/zHNegqyHgwUEEKiCAEG2CuhcsvQClQyyOrTSrgf8NO1N+Fs9tbVQH6y56WenerodXHDr36VDpy5pz5PvhmxBVrs2XH7C4Z7TpuvaENTq+csbbxMdsaGYSUd7uOMKb5/gGx99RurV8z5Br9fwP8BmWmS1f/Odv7/EU4wbH7HO4XsKX3cI6l7wh8ef9/xk7jlRHguFBFntwqFdOdzT2h9+kCtP9g6Ndtldw5xh4PINsgeeeJrdV9h9Df9Dhu4gG1Qn1w1/Shpu0sh9Cnve/4cQQTaFiBUIIFBhAYJshcG5XHkEKhlk3f1eg+5mjDUu61N3bWwF1D6g+1pjst515caftDt07CwX/On2oMMLXpctyOqJ/UFEH/jRB3/806J5c+TWX3nHtnW3Evr3z3U5qPUvqEVWn6L//UnecGeCrL7paujF3lbvdK3F/hDoDnC5ljndfoUEWf0unHXFdZ5TBt1rMUFWHyTT4dbc04zPP5W7r73cWeV2WDR3tj1igbPRmvnt7fdI63Yd3KushxpTAzdB1kPEAgIIVEGAIFsFdC5ZeoFSBdnDz/6F7HGw9+dT/8Ne2scy0ytCV69cIdeecbznJvvssbeMf29jX81SPnRkLpRLkNWf5PWneTP5h3oy6/19KrWP7DUPPFF0S2ZQf0z/w2ZahtlfzpC//e7/THHsTxNk9W1Y1511omebvmSgV//dPet04el/3m69LOElZ73/aX1nQwEz/iAb1DfVP45sJYKsvlRhb2sECPeUKcgGBemgsY0nfTzaGmfWOyoHQdatzDwCCFRDgCBbDXWuWXKBQoPsv2+90XrD0ntOeYJCgD/InmK9zaj3rns6xwTNPP7320TfHpZuSteCmG7/XNYHhb+bn/COs+p/GE3Pe/Ff/+l5g5f22fzrb3/lGVmgW59+cvaVN+RSjKz7+J+Q15dJnHLJFZ6XLmQbtcBfJ9r3VbtquIfWmjtrpvzl4vM95dG+o9qHtBTTey8/J8/ee5dzqqBxg6MQZPUG/H/gaIutvpjBvDp3+ZLF8s+rLxXzJjJz0wRZI8EnAghUS4AgWy15rltSgUKD7MNDb/a0lHbsuZ3sf8yJMs96dW3//Q+2+wn6Q5M+Ea6tb5kmf59E977ukQzc64udD3rQbPC5v7TDiPYf1aAV9LO8trZqgN+iSzd7KLGXH3ogJbD86g9D7XFmiy2jHh80ZqmOnqCtww0aNpT/vvhc4IsOTIusnkOHktIhpdyTvh72IKt/aIs2be2RGV4cfp9nCDEdJUKf1C/25RPmmh++ZpXhXxvLoI7aWr9i2TLZapvu0m7rjinjyIaxRVbvZ6z1R9dj1h9f7km9dhq0v2jf6rFvv+np3232I8gaCT4RQKBaAgTZaslz3ZIKFBpkX3jwHnnnuf8ElsW8BKCQIKtPePsf7jIXOenCIdJnj33MYsk+g35yNyfvu9dAOeGC39qL+vrSkU89ajZl/Rww+Bj5yclnZt0v1x0KfYuUO8hquNI+n19O+jzXy9pvWdMwX6pp0kcfio5DHDQd9fNf238IRaVFdv26dfZwZ+5uJ0H35V9HkPWLsIwAApUWIMhWWpzrlUWg0CD7pfV2pn9ePSSwTOdcdaN03b5vypu9cmmR1RPq27Zee2x4yrmvH/605yfwlB2KWKHj1Op4tf5JW5rPu752HFENgc/d/y/PG8b8+5tlbSU92HpFbf0GDc2qknzOnDzBflmBdmMImvRhuMbWG9Hc9+IOsnqMvhXrYWuc2FzC7DHnXSA773tg0KUKXvfDmtVy1anHBB6vb1Tb/7iTI9MiqzehfbuHWX1gv5o8MfCetHX/peH3e1q5CbKBVKxEAIEKChBkK4jNpconUGiQ1Tdz6duuXvz3fZ7C6c/Ep1x8uf32Lh0uSocoMtNpQ66U7XbezSym/Vw4Z7b86f/O9WzfZd+D5OjzvA8xeXYockGfQH946C2iD365p6DuDNPHj7Ve4/qs5w1Z5hh9Xa39GlXfiwrM9lJ8Llu0QB6/fajog0juSVuPdYiz54fd43ktalBo0pZvfZ3th9Zrbf33rOfU/reHnfkza4zZdu5LlGxe38o1/LabPOFOT65dHAZZr8v1j5oQ1I923dq11ggN3reNuftQj33nTXnsb39yyuzvnuDvjx30IOEXEz+Tf1l9XM2UbjxdHb/3Q+vhuC8nTrBeuTzW7obRfYedrIcb95auVj9p/6gXF1uvLm6b5dXFy5avkPGTp5pLS6eaVnLGgNK1jDsnZgYBBBIpQJBNZLXH76YLDbJGQv8PXPvF6vip7ay+luatSmZ7IZ9BPz3/7Jo/SJdefQo5Xc7HaDhftmihLLReaqAvGtCgUb9Bg7THa0vckvnz7D6Q+orXtptvGTgma9oTFLnhh+/XyJyZX1pjydaTDp27BI4pm8slvluySJYuWGCFylXSuv3m0rKmXdGjLORyXW3hXmj9AbHMerWvPiTVyhq2qk6dOrkcGpp99A81/d7oSxg2s94oVteqC/+k9+kfhziXIdkIsn5JlhFAoJQCBNlSanKuqgkUG2RLXXANh/dcd4WnldB+Ja316tG6deuW+nKcD4GiBPStX5M/Ge2cQwPtL2/8k/NSBt2gr/rVV/66p5seey7r95kg6xZjHgEESi1AkC21KOerikAYgqz+zDxl3Mey6rvvAofeOuT0c2TvQ72D/CvWimVLZf63Xxfspm9g2nKbbgUfn+uBUSlntvvR1+9q/9pCp+ZWq7W2+MZpev2Jh+T1xx/23J
L2U+616+52/+hvv5hhB1n3Djo28kkXbuyu4N7mnifIujWYRwCBUgsQZEstyvmqIhCGIBs0JJTB0D63V1itsQ0bNTarnM8xI603gd258U1gzoYcZ9xvacrxkIJ2i0o5s92cDjOlw00VOu124CGiDz7FadJwf9sFP8/rlobccV9OfY8JsnmxsjMCCOQpQJDNE4zdwykQhiAb9OYjo6WDy3fssa1Z9HxGJSBGpZwe3IAFgmwAirVqytiP5Il/DA0cL9Z9hP5RNvicX0o/a+zfXCaCbC5K7IMAAoUKEGQLleO4UAmMmjBNRk2c4ZSp4xYdZOvNve+KdzaWaUZHDHju/rtl8fy59iDy2lKqP/nvst+BGVuu9I1cEz/6oOBSNWnWXHY/+NCCj8/1wKiUM9v96Jvc8h0v1X1OrdNSjkfrPne15/WBLg20499/135g8LvFi+witW7fQVrVtJeaLbeSvnsPksZNmuRc1Fmz58hX385x9h+43TYysFf5u8I4F2QGAQRiLUCQjXX1JufmZi5YLMNGuR5WadZM+vTk/yyT8w3gTsMqQJANa81QLgTiIUCQjUc9Jv4uCLKJ/woAEFIBgmxIK4ZiIRATAYJsTCqS2xC55omXPQx9enaX5s2aetaxgAAClRPwh1i98jXHHly5AnAlBBCIvQBBNvZVnJwb9PeTbU73guRUPncaSgF/kKV/bCiriUIhEGkBgmykq4/C+wX8rbLVeOjLXyaWEUiigD/EqgGtsUn8JnDPCJRXgCBbXl/OXmEBf6usXp4wW+FK4HKJFwgKsbTGJv5rAQACZREgyJaFlZNWU8DfKqtlIcxWs0a4dlIEdMxYHWpr2fLlKbdMa2wKCSsQQKAEAgTZEiByinAJ+EcwcJdOA632neUhMLcK8wgUJ5ApwOqZaY0tzpejEUAgvQBBNr0NWyIskCnMum9LQy0TAggULhDU+uo+2xkD+0untq3cq5hHAAEESiZAkC0ZJScKm4CG2ZnzF3ne+BW2MlIeBOIq0KmmldUS25UQG9cK5r4QCIkAQTYkFUExyiegD4Dp5H6FbfmuxpkRSLYAATbZ9c/dI1BpAYJspcW5XtUETKCduXCJ1VK7uGrl4MIIxE1Aw6tOtMDGrWa5HwTCL0CQDX8dUcIyCmj3A6Z4CmTqVqIPH3WqaR3PG6/wXdH/tcLgXA4BBDwCBFkPBwsIIBAngaBxhc398RCSkeATAQQQiK4AQTa6dUfJEUAgB4F0YVZ/Dj9jQP8czsAuCCCAAAJhFSDIhrVmKBcCCJRMYNhbowP7RRNmS0bMiRBAAIGqCBBkq8LORRFAoNIC6cIsg/VXuia4HgIIIFA6AYJs6Sw5EwIIhFgg00syCLMhrjiKhgACCGQQIMhmwGETAgjES4AwG6/65G4QQAABgizfAQQQSJRAuoe/FIGW2UR9FbhZBBCIgQBBNgaVyC0ggEB+AoTZ/LzYGwEEEAirAEE2rDVDuRBAoKwChNmy8nJyBBBAoCICBNmKMHMRBBAIowBhNoy1QpkQQACB3AUIsrlbsScCCMRQgDAbw0rllhBAIDECBNnEVDU3igAC6QQIs+lkWI8AAgiEW4AgG+76oXQIIFAhAcJshaC5DAIIIFBCAYJsCTE5FQIIRFuAMBvt+qP0CCCQPAGCbPLqnDtGAIEMAoTZDDhsQgABBEImQJANWYVQHAQQqL4AYbb6dUAJEEAAgVwECLK5KLEPAggkToAwm7gq54YRQCCCAgTZCFYaRUYAgcoIDHtrtMycvzjwYrzONpCFlQgggEBFBQiyFeXmYgggEDUBwmzUaozyIoBAkgQIskmqbe4VAQQKEsjUzaBTTSs5Y0D/gs7LQQgggAACxQkQZIvz42gEEEiIQKYwqwRnDOwvndq2SogGt4kAAgiEQ4AgG456oBQIIBABgWxhln6zEahEiogAArESIMjGqjq5GQQQKLcAYbbcwpwfAQQQyF2AIJu7FXsigAACtsDMBYtl2KjRaTVomU1LwwYEEECgpAIE2ZJycjIEEEiSACMaJKm2uVcEEAijAEE2jLVCmRBAIDICmboaMKJBZKqRgiKAQEQFCLIRrTiKjQAC4RHIFGa1lIxoEJ66oiQIIBAvAYJsvOqTu0EAgSoJZAuz9JutUsVwWQQQiLUAQTbW1cvNIYBAJQUqGWb1WgN7davk7XEtBBBAIHQCBNnQVQkFQgCBKAtkG9FA+80O3K5r0S9PuOaJl63zbEOYjfKXhbIjgEDRAgTZogk5AQIIIOAV0DA7auJ0mTl/sXeDa6mYEOoOy8Wcx1UcZhFAAIFIChBkI1ltFBoBBKIgUK6uBu4gqw6E2Sh8GygjAgiUQ4AgWw5VzokAAgj8TyBbmNXd8g2iQefM9xxUEAIIIBAHAYJsHGqRe0AAgVALlLqrgb5VTM/pnwizfhGWEUAg7gIE2bjXMPeHAAKhEQhqSXUXLtcHwdIFWT0XYdYtyjwCCMRdgCAb9xrm/hBAIFQC2cKsFjZbGNURCzJN2Y7PdCzbEEAAgSgJEGSjVFuUFQEEYiOQLdCmC6P+B73SgaQ7Pt3+rEcAAQSiKECQjWKtUWYEEIiFQLYwqzfpD6S5Blk9tlPbVvbrcXWeCQEEEIijAEE2jrXKPSGAQGQE8n0QLJ8gqwiE2ch8FSgoAggUIECQLQCNQxBAAIFSC+TaOqvXHTVxRl6XJ8zmxcXOCCAQIQGCbIQqi6IigEC8BXIJsy2aNJalK1fnDUGYzZuMAxBAIAICBNkIVBJFRACBZAnkEmgLEdEwO7BXV7u7QSHHcwwCCCAQNgGCbNhqhPIggAAClkC5wqzinjGwP2GWbxkCCMRCgCAbi2rkJhBAIK4C5Qq0hNm4fmO4LwSSJUCQTVZ9c7cIIBBBgRFjxsu4mbNLXnLCbMlJOSECCFRYgCBbYXAuhwACCOQjkO9wW/mcW/clzOYrxv4IIBAmAYJsmGqDJ6RTIAAACLpJREFUsiCAAAIugXKHWHMpwqyR4BMBBKImQJCNWo1RXgQQSIRApUKswfS/Qcys5xMBBBAIswBBNsy1Q9kQQCCRApUOsQaZMGskMn9OXjo18w6+rZOX1O7fs2V335bMiz1b5Ld/5rOxFYF4ChBk41mv3BUCCERUoFoh1nAlKcyaQGqCpjGYsmyambU//ds9Gyu44A7CPZp3S7my2U4ATqFhRYwFCLIxrlxuDQEEoiVQ7RBrtKIeZk1A1fsxIdSEU7Ns7rWQz6ZNm+Z1mNl/xYoVeR2X7/5BJzfh1h18dR1hN0iLdVEUIMhGsdYoMwIIxE4gLCHWwEYhzJrAquG0kKBqAqbes3e+iWGwP93bPBsqvOAOtitWrEy5utluPlN2CFihoZaQGwDDqsgIEGQjU1UUFAEE4iygLz6YuWCJfYsaasMwhSXMmsD6zMwXbJZsraru4GnmmzbdGE7NujAYl7sMGmr9obd2XfbWYXfIpRW33DXF+QsVIMgWKsdxCCCAQBkFTJidOX+RHXDNchkvGXjqSofZfEKrCaT6aYKqWRd4M6xMEfAH3Wwhl3CbQsiKKgsQZKtcAVweA
QQQyEdAA60Jt3pcJQJuucJsrqHVhNP27dvZVGY5Hzf2zV/AHXIzBVx3uB3c+dD8L8QRCBQhQJAtAo9DEUAAgTAImDBbzoDbqW1L6y1guxZ9uxpeM3URMCGV0Fo0dVlOkEu4NcGW7ghlqQJO6hMgyPpAWEQAAQTiImAC7qgJ0+1bMsvF3J+2znaqaS2d2rbK6TQaXM3DWEF9WzW4ElpzogztTibcpmu1JdiGtupiUTCCbCyqkZtAAAEEchMwYbbY1ttONa2kU5uWMrBX6nimptU1XXDV8Kp9Wk3ra24lZ6+oCGQLtkd0OkRorY1KbYa/nATZ8NcRJUQAAQTKLmAC7ogxn8nSlavzup620q5psFA+XfWBfZw7wJqwqq2uZj6vk7Nz5AXmzp1n38PcuXNT7kVDLf1qU1hYkYcAQTYPLHZFAAEEkiKgw4FNnr1A5i79Lu0tr2mwQL63AuyaBovs/8yOJrASXo0In0bAtNb6Q63pfkCoNVJ85ipAkM1Viv0QQACBBApooNVp1MQZzt3/UH+ZrNxkluinhlidCK8ODzM5CqRrqaWVNkdAdrMFCLJ8ERBAAAEEchL453svyrgV7znhVQ/SAEvLa0587JRBICjUaqDViVbaDHBsEoIsXwIEEEAAgYwCQQ9vEWAzkrGxCAENte6uBwTaIjATcChBNgGVzC0igAAChQj4AyzhtRBFjilUICjQ0jpbqGZ8jyPIxrduuTMEEECgYIERXz7vvLjAH2AXT18ky2enfwgs3UU77LiFNGzaMN3mnNevWbpa5o2vfQK+1TatpdkWm9nHrl+7Xr55f5Y937RdU2ndo23O5wzDjt98MEvW/7A+76I0bNJQOuy0Rd7HReUAAm1Uaqo65STIVsedqyKAAAKhFNBW2FvGDnXK1r59e+eFBWblmH+8L1OemWgWc/78yd8PL0m4nPPxt/LGZS/b193p57vKtkf3tudXLVwpT5/0qD3fZf+ussfvBuRctjDs+NgRD8ra1WvzLkrTDs1k8APH5X1c2A7YsGGDfPHqNFk5f4X0OXXHlOIRaFNIWGEJEGT5GiCAAAII2AKZWmHdRARZt0bp5pMcZNetXievX/qiLJy0QLY5sJvsfsk+gbAaZt1vEGOEg0CmRK0kyCaqurlZBBBAIFjglnFD7VfJ6tauXbdxhtMK2tsdZLc7dntp16dD0G4p62r6tJcGjRukrM93xeJpi2TMHe/Zh/U8srd03KezPR+nFtkB1+wvdevVzYmmQZMGUtO7fU77hnWn1YtWyVMnPmIXL1OQNeV3t84SZo1KMj8Jssmsd+4aAQQQsAXcXQn8fWHTEbmD7D5X7idb790p3a4VXR+nIHvSi2dK3fq5BdmKIpfpYvkGWS2GtsxOn147vrG+UGFI3wvLVDpOG2YBgmyYa4eyIYAAAmUWOPPN8+wraIjVlthcpnIF2XXfrxMNNPUa1JPGrTaVOvXq5FIcZ59KBlkt69qVP9jldAoQMKMPpm1Yv0Eat940YKt3lbtrQViCrP7kv+a71dKkbVOpUze/+jB3t3rxKrt1eZPmjcyqlM9Cgqw5iYZZDbWEWSOSrE+CbLLqm7tFAAEEHAHTJzbogS5np4CZUgbZWe/OlBmvTJUlMxaLBlH3tPnOW0rfs3aWVl1bu1db/Sjny/t/fsde1/uEHaTzfl3t+UxBdtRVr8nyOd/Jpq2byH43H+w5n1mY8uxEmfrcJHtxwFX7y2ZbNbfn5306R0bfXtuVQVugJz89Qaa9ONne1rpHG6tPZ3fpfti25jSiozp8NnysLJg4T9YsXWOv1y4VbXu3sx5i6idtetY4+7pnig2yy2Ytlbevf8M+Zdtta2S3i/Z2n94zP/beMaKjJOi012WDpGWXVs52Dd/jhn0sc8fOlhVzljvr2/aqkW4/7SldDujmrDMzGuxfueA5+XH9j7L9SX2lZvv28ql1jjmfzHbqddM2TWSL/ltJv3N28Yxe8frvXpIV85Z7rtW8Ywv71Afc+lNp1KKxuUzaT8JsWprYbyDIxr6KuUEEEEAgVcCEWN3St+8OqTtkWFOKIKvB5+1r35DZH32T4Uq1m/a6fKB0GrixtbiQUQtGnP64HZQatWgkxzx+cuA1x933kXz+6Kf2tp/+Y7C06lYboL9+7yt565rX7fU1Vhid//k8z/FdDrBGSPitNULCBpEJT4yXsfeM8Wz3L+xw+o5W2OtnPW7t3VJskNWW36dOfNgJz0c/emJgi/G6Nevk0cMfsC+uAfvYp05xujHMHvONvHPDyIyjJ2if5F1/s5cnjK5duVYeO/JB+5xdD+5u1eu3ToD13qX1NjhrlIUDbztENNjq9ORxDzll9u971EMnyKZta/fzb3Mvu7sZDOl3ofRs0d29mfkYC/w/AAAA//89igpSAAA9JUlEQVTt3Qe4FcX5x/EXGyqgdFCR3hSk2LuAWBGxYYsNY6wxBmPEFBNj/hoxMZoYS2yosbeAit0ANrCCKChKtQKKdFCx/Pedyyxzzj3n7ulny3efR+6erTOfOc/j786dna33o7cICwIIIIBAYgTeX/KBjJx8talv69atpXXrVnnV/fXrJsqMMdPNOXtfvK+03at9XufrwR889p68du0r5ryNG28sPY/rI007NxOpJ7L802Uy7f6psuyTpWZ/wy0ayZDbhkq99b2d3vL5m5/K8795yqzvcMYuss2RPc36qi9XyiPH32fWOw7sLLtfuI9Z139Gn/yArPh8uei9jnrgJ/52d2XKbW/Iu/e9bTYdfN1h0rSLVx5v+fiVeTLhkufMuv1nw002lPretfSaA0ceJK37bilzx8+Sly4fbw+RbodtK1vttLXUW6+eLHx3vrxz9xR/356/7Sft+3XyP+vK/UPulDWr15htxz8xTNbbYL2U/bl8mPqft2TqfyabQ3f6+W7S7dBta502b8JsefGycWZ7j2N6Sd+f7mTWtS7qZJctd2ojnQ/sZswWz/lK3rlrsny95Guzu+sh28jOv9jdHiprVq6R+w+/0/+sK5u12Vy6Dt7GnP/F9IX+d0b39Ty2t/Q5dUddlY9fnicrFiyXN2981Xxu1q25dD+sh1lvu2cHWb/++mY96J/58xfI/PnzzWGj+t8QdDj7YyJQjyAbk5akGggggECOAqPnPC5j5o71Amz+IVZv4QbZTZs3kAatGtR552bdWsiOZ+7qH6P9J2NOedCEQN04+JYjZfO2jf39uvLdN9/Jo8MeEg2nuhxy8xHSuF0Ts17tIKvhr8+wHU1AXf7ZMmnYupF8/+33Mvqk+/2gN+CyA0SDoLt8+d5Ceeq8x8wmDdSH3XmMbLDxBv4hbpDV8L7e+rkF2QOuGSz1N6tvrqPlUVtdNBAedO0Qs+7+M+7iZ+TTVz82m1zXCX96zoRK3bHd8X2k98k7mF8s7LmrF62S5y56UpbOW2I2HXy9F/b1lw9vSQ+yWvd9Lhko62+4LoS6QT/9Fwq99sPH3Wuu1Wn/LrLbBXub9Xz/mTlzlqxYsUK6N+kqI/oMz/d0jo+gAEE2go1GkRFAAIFiBIaNO8uc3qdP74Iu
4wbZXC7Qus+WMvDKg/xDNZy+/q+JsvSjJdKiR0vZ7VeZQ4v22GrPrS4H/mOwNN+mpVmvZpDV4D7kjqEpAU0LNfPJGTLp6pdM+bQXcudz1/VWmo1r/5kyyuv1vbem13ePEftIh307+7vdIOtvzGHlyHuPk02abeof+cz5j3s9wAvM50NvO8r0jNqdq7/yAuOxNYHRDbrLPl4qj/70IXPY5u0ay6AbDs/YI+zadz6wq+x6/l7mnPQgO+T2odJoy83sbf2fbh3dXudSBVkNsRpmCbI+eexXCLKxb2IqiAACCKwTsMMKGjZsKJ07p/5pe91Rda+5QVb/xL5hg43qPKFV79ayx4h+dR7j7lyzao18+f5CmXbfVJk/5TOza/+/D5KWPVubdTdMVXpowdZ7tJN9/jjQLa5Z1z+Lv/fIu2Z9378cKFvssFWtY3SDDjF45vyxZt92P1nb67n2SDfkaU+nHUqxdnfWH/tefpBs1GhdG8x5fqa8PHKCOb73SdvLdif09c+dMXq6vH79RPN5l1/uIV0O7m7W3eETPY/z/uzv9ThnWr5f873cO+h2s6v5Ni28XzAONetukE3vbXWv8+S5Y2TRjC/NpmMfO1k2qF/TI12qIKsXnjKl5hcFhhe48vFdJ8jGt22pGQIIIFBLoNhhBXpBN8gWOkbWFkz/FD5/8meyZO5i0V7BxbMX+X+et8foz7AEWXdMqVs+98/17va61tvt00H2+t0A/xA3yLq9lf4BOa58t/o7uW/IHeZo7UE+4u5j/SECY8/8r2f8ldl39CMnykYNawLwew+9K2/eVDNGNcfbiP4Sc8yYk8zhbpDVXvYDrh6c8TKu07FjvCC7SemDrB1eMKLvcOneuGvGcrAxPgIE2fi0JTVBAAEEAgXCEmS/XfGtvPqPl2TehDmBZdYDwhJksz1A9eipD/kPp+VUIe8g90/7ek6pgqxea9I1L8nMJ2boqj8sQ39ZePz0R8y29gM6yp4X9Tfr+o/7y4m/MYcVDbIaaN0gu/XuXq+1Nz420zL+D8/KJ5M+MrvKFWTtQ19D2g+SwzockqkYbIuRAEE2Ro1JVRBAAIEggTAEWf3z9P9++7QsePtzv7j6cFOzrs2lcYcm0rRTczOMYOpdb8l7D9f8ub4SQdYdHpBt1gJ9Ul+f2E9fnh7+mHwxbaHZvMt5e3h/6q+ffkitz9obusX264YglDLIfjF9gTz9y8fNPXX2hJ3O3k3c8bn7XuENf3Du7c52oPVr1WeLWuXNtKHtHu3NEAiCbCYdtlVCgCBbCWXugQACCIREoNRjZAsZWvDZ65/I/373tBHR3rz+l+3vj391mfQYPVaX/f56sLTqXROuihkj6/453L2Xrr88crzMeX6W2ZxvkJ309xdl5lMfmHMPuPoQ7yG2/KY00xNLGWTFm1hz9Ck1U45pnXUYwcPH3WOGbegY1iPvPT5lDK47rlanxdLpsfJZwhRkGSObT8tF/1iCbPTbkBoggAACeQmUctaCQoKsO1/rDqd788AeVTMPrFuJH7//0Q9eut3O1arrhQTZx372sD9tlPuQkV5PF73ff0+835/uK98gO+2BqTL5ltfNtXQO1B3PXjfdmNm49p95L8yRSVe9KNoDrWNkdf5cu5Q0yHoXnf7gO/LWza+Zy2//s5399V4n9pVeJ25vb2t+ulODNenYVLT+mR42W7lghRmeoHPo6nH2wbdSBtn0OYBTCprDB4JsDkgxOoQgG6PGpCoIIIBALgIjp1wt7y/+oCTzyBYSZPWhIn24SJf0ifV1m84zqz2cs57+UD+axZ2XtZAgq0MZPnujpne3z7AdUgKk3iB9jGi+Qdadv1Wvd8A1Xq/stqm9st8u/1ae+Plof/7cvX4/QNrt3UEPN0upg6z7ggh7D/05ZJQ3NdZWm7mb5IfvfjAB1b6EQoPvtkO3SzlG2+Wly8f545q7DfGGLJyzmzmm2CCrNg8c+R9zrS13bCMDLj8g5d65fmB8bK5S8TmOIBuftqQmCCCAQE4CdniBHqxTcOlUXPksbugrJMjOHee9Aesv4/1b6pyrW+7cRjZqUF+0Z3D2sx96b8ma7e/XFXfO1UKC7KynP5CJXk+oXbRXsuV2reWbZd/Ixy/NrXW/fIOsXtcdZ6qf9Y1ZbXZta3o2P3/jU/lg7Ht+r7D2yB5661Epc7W6QVbflqZvBMt16XFMb//lBO457iwBur1lz1beg3OZH4DSqc6eu/BJ/3R9MYF5M1eTTWThO/PNsAv7y4Ae5M5RW2yQdaf10mvr7BCbNN1UOg7skjK1mO7LttgQq/uZsSCbUvy2E2Tj16bUCAEEEAgUsA99FTKfbLFBVnv/9OEoO59otsLq0+86v6kubs9tIUH2u6+/k8fPeMTvDc10Tw1us56p6QUuJMjq28hevmK8/3asTPfQbTpGVedf1TDrLm6Qdbfnst7v0v1MaE4/9iMvpL9w6fP+5t0u2Es67Z99Sqp37pksb9/+ln98tpX+f95fttpla393sUFWL/TUeY96v8h84V9TV9IfSkvZmfbBDilgtoI0mJh/JMjGvIGpHgIIIJBNoNAhBm9cP0neHz3NXFbHSOpLAvJdvl682jxFbx+Qcs/XXswdztxFGrRsKPccPMrsMg9pjfbmLPU6KXXe2edG1PQc7njWrtL98B7mGPetVZ0O8F5zmvbGMA1br1/3isx+bqZ7OzPWc/vTdzY9oLZH0n39qr7OVXs2ddl1+J7S+aBuKeenf9Ae57duft0fb+vu1/GzprfReROX3V9MkE0Plvaa+urch4beLWtWrzGb7HRZdn+mn4s++FJeu/bljL9oaNvo+NqmXWpeTWvPd+eu1d5k7anPtEy4xHsN7tpfTtLHKq/6YqWM/+Oz8tXMRf6p2luvvcJBi+2NJcQGScVvP0E2fm1KjRBAAIGcBNwhBq1bt/bGzKaO6czpIkUepPPJrpi/XDTYNmzdyLzWNNNDRkXeJuV07RHW1+N+s+xr78/xzf2XAqQcVIIPGpyXfbJEvl35rQnlm7Zo4L/JqgSXL/sl9M/9yz9dJhow9RW4+ouFfYFCOW+ub/nS3u2NGtaX+jqNWcAICxtitUy8zaucLRPOaxNkw9kulAoBBBCoiIAdYqA3q1aYrUhFuUksBexbvLRyjIuNZRMHVoogG0jEAQgggEC8Bdye2ULGzMZbh9qFUWDFihWiIVaX7k26ig4p4HW0YWyp8peJIFt+Y+6AAAIIRELAjpnVwtI7G4kmS2Qh3V5YDbEj+gxPpAOVrhEgyPJNQAABBBDwBdyhBrqRQOvTsFJlAXcsrBaFB7uq3CAhuT1BNiQNQTEQQACBsAhomJ2x9EPz0gQtE2E2LC2TzHLoMAINsfpTF4YSJPN7kK3WBNlsMmxHAAEEEi6QqXdWSaoxu0HCmyKR1SfAJrLZ8640QTZvMk5AAAEEkiVAoE1We1e7tukBVsvDMIJqt0p470+QDW/bUDIEEEAgVALpgVYLx7CDUDVRZAuj4XXFipXeEIL5fh0YQuBTsFKHAEG2Dhx2IYAAAgjUFsgWaPVIhh3U9mJ
LdoFMva8E2Oxe7KktQJCtbcIWBBBAAIEcBDTQ6jJm7tiUo7WXVhdCbQoLH9YKZAqvuosAy1ekEAGCbCFqnIMAAgggkCKgodad6cDuJNRaieT+1OCqizvzgNUgvFoJfhYqQJAtVI7zEEAAAQQyCmQLtfrWMP1PF3prM9LFZqPtddUK2SBrK0d4tRL8LIUAQbYUilwDAQQQQCCjQLbhB3qwDbYNGzbwA27Gi7Ax9AJ1BVctvA2vZr1x19DXhwJGR4AgG522oqQIIIBApAXeX/KBeclCpiEIWjEbbGvWCbdhbWzbw6pDBXSxn93yanDttnkXE2C7E1xdGtZLLECQLTEol0MAAQQQyE3A9tZmC7Z6FcJtbpblOsqG1LpCq95bg6suOt8rwdVQ8E+FBAiyFYLmNggggAACdQvYHls9qq5wq/sJuKpQmsWGVZ3Hdd16zQName7ghlbdT3DNpMS2SgkQZCslzX0QQAABBPIWyCfc6sXdgFvzuYG5p25P6mLDqdZfw2rNz5qg6u4zO9L+IbSmgfAxdAIE2dA1CQVCAAEEEKhLQMOtLu8vrvkZ1Hubfi0bau1P3a8PnLmLu8/dHpb1TAHU7VHVcmY6JlP5bVjVfTo0wC70tFoJfoZZgCAb5tahbAgggAACeQm4Pbh6ooZcXWzoNR8K+Cc92KZ/LuCSWU/JFEAzbct6gbQdblDVB7B0sdsIq2lYfIycAEE2ck1GgRFAAAEEihFI79HVa9nAa69bbPC11ynXTxtE3evbWQLsNkKqleBnnAUIsnFuXeqGAAIIIFASARt+7cXKHXTta39H9B1ubkkotfL8RCBVgCCb6sEnBBBAAAEEqi4wcsrVZjiEBllCbNWbgwKEWIAgG+LGoWgIIIAAAskU0B7gkZOvNmNZR/Sp6ZVNpgS1RqBuAYJs3T7sRQABBBBAoCoCw8adZe5Lr2xV+LlpRAQIshFpKIqJAAIIIJAsATu8QB/solc2WW1PbXMXIMjmbsWRCCCAAAIIVEzADi/QG47qf0PF7suNEIiSAEE2Sq1FWRFAAAEEEiVge2UZXpCoZqeyeQgQZPPA4lAEEEAAAQQqKWB7ZRleUEl17hUlAYJslFqLsiKAAAIIJE6Ah74S1+RUOA8BgmweWByKAAIIIIBApQXs8AJ6ZSstz/2iIECQjUIrUUYEEEAAgcQKMLwgsU1PxXMQIMjmgMQhCCCAAAIIVFPA9sry0Fc1W4F7h1GAIBvGVqFMCCCAAAIIOAL0yjoYrCLgCBBkHQxWEUAAAQQQCKsAD32FtWUoVzUFCLLV1OfeCCCAAAII5CjA8IIcoTgsUQIE2UQ1N5VFAAEEEIiqAMMLotpylLucAgTZcupybQQQQAABBEooQK9sCTG5VCwECLKxaEYqgQACCCCQBIHRcx6XMXPHCnPKJqG1qWMuAgTZXJQ4BgEEEEAAgRAI2OEFWpRR/W8IQYkoAgLVFSDIVtefuyOAAAIIIJCXAMML8uLi4JgLEGRj3sBUDwEEEEAgXgK2V5bhBfFqV2pTmABBtjA3zkIAAQQQQKBqAswpWzV6bhwyAYJsyBqE4iCAAAIIIBAkYIcX0CsbJMX+uAsQZOPewtQPAQQQQCB2AnZ4gVaMh75i17xUKA8BgmweWByKAAIIIIBAWARsr+yIvsOle+OuYSkW5UCgogIE2YpyczMEEEAAAQRKI2B7ZRleUBpPrhJNAYJsNNuNUiOAAAIIICA89MWXIOkCBNmkfwOoPwIIIIBAZAUYXhDZpqPgJRIgyJYIkssggAACCCBQaQGGF1RanPuFTYAgG7YWoTwIIIAAAgjkIcDwgjywODR2AgTZ2DUpFUIAAQQQSJIAwwuS1NrUNV2AIJsuwmcEEEAAAQQiJMDwggg1FkUtuQBBtuSkXBABBBBAAIHKCjC8oLLe3C08AgTZ8LQFJUEAAQQQQKAgAYYXFMTGSTEQIMjGoBGpAgIIIIBAsgUYXpDs9k9y7QmySW596o4AAgggEBsBhhfEpimpSB4CBNk8sDgUAQQQQACBsAowvCCsLUO5yilAkC2nLtdGAAEEEECgQgIML6gQNLcJlQBBNlTNQWEQQAABBBAoXIDhBYXbcWY0BQiy0Ww3So0AAggggEAtAYYX1CJhQ8wFCLIxb2CqhwACCCCQHAGGFySnralpjQBBlm8CAggggAACMRKwwwtG9b8hRrWiKghkFiDIZnZhKwIIIIAAApEUYHhBJJuNQhcoQJAtEI7TEEAAAQQQCKMAwwvC2CqUqVwCBNlyyXJdBBBAAAEEqiTA8IIqwXPbigsQZCtOzg0RQAABBBAorwDDC8rry9XDI0CQDU9bUBIEEEAAAQRKIsDwgpIwcpEICBBkI9BIFBEBBBBAAIF8BRhekK8Yx0dRgCAbxVajzAgggAACCAQIMLwgAIjdsRAgyMaiGakEAggggAACqQIML0j14FM8BQiy8WxXaoUAAggggIAwvIAvQdwFCLJxb2HqhwACCCCQWAGGFyS26RNTcYJsYpqaiiKAAAIIJE2A4QVJa/Hk1Zcgm7w2p8YIIIAAAgkRsEFWqzuq/w0JqTXVTJIAQTZJrU1dEUAAAQQSJ8DwgsQ1eaIqTJBNVHNTWQQQQACBpAnYXtnuTbrKiD7Dk1Z96htzAYJszBuY6iGAAAIIJFuAIJvs9o977QmycW9h6ocAAgggkHgBhhck/isQWwCCbGybloohgAACCCBQI0CQ5ZsQVwGCbFxblnohgAACCCCwVoDhBXwV4ipAkI1ry1IvBBBAAAEE1grYIKsfmYaLr0WcBAiycWpN6oIAAggggEAWAYYXZIFhc6QFCLKRbj4KjwACCCCAQG4CNsgOaT9IDutwSG4ncRQCIRcgyIa8gSgeAggggAACpRCwwwuYT7YUmlwjLAIE2bC0BOVAAAEEEECgzALDxp1l7sA42TJDc/mKCRBkK0bNjRBAAAEEEKiugB1eMKLvcOneuGt1C8PdESiBAEG2BIhcAgEEEEAAgSgIjJ7zuIyZO1YYXhCF1qKMuQgQZHNR4hgEEEAAAQRiIMA42Rg0IlVIESDIpnDwAQEEEEAAgXgL2HGyDC+IdzsnpXYE2aS0NPVEAAEEEEDAE2CcLF+DOAkQZOPUmtQFAQQQQACBAAGGFwQAsTtSAgTZSDUXhUUAAQQQQKA4AYJscX6cHS4Bgmy42oPSIIAAAgggUHYBhheUnZgbVEiAIFshaG6DAAIIIIBAWAQIsmFpCcpRrABBtlhBzkcAAQQQQCBiAgwviFiDUdysAgTZrDTsQAABBBBAIJ4CNshq7XhdbTzbOCm1IsgmpaWpJwIIIIAAAo4AwwscDFYjK0CQjWzTUXAEEEAAAQQKF7BBdkj7QXJYh0MKvxBnIlBFAYJsFfG5NQIIIIAAAtUSsMMLujfpKiP6DK9WMbgvAkUJEGSL4uNkBBBAAAEEoitgX1fLONnotmHSS06QTfo3gPojgAACCCRWwA4vGNF3uHRv3DWxDlQ8ugIE2ei2HSVHAAEEEECgKIHRcx6XMXPHCsMLimLk5CoKEGSriM+tEUAAAQQQqK
YA42Srqc+9SyFAkC2FItdAAAEEEEAgogJ2nCzDCyLagAkvNkE24V8Aqo8AAgggkGwBxskmu/2jXnuCbNRbkPIjgAACCCBQhIAdJ8t8skUgcmrVBAiyVaPnxggggAACCFRfgHGy1W8DSlC4AEG2cDvORAABBBBAIBYCdpws88nGojkTVQmCbKKam8oigAACCCBQW4BxsrVN2BINAYJsNNqJUiKAAAIIIFA2AYJs2Wi5cJkFCLJlBubyCCCAAAIIhF3APvDFixHC3lKUL12AIJsuwmcEEEAAAQQSJsADXwlr8BhVlyAbo8akKggggAACCBQqwANfhcpxXjUFCLLV1OfeCCCAAAIIhESAcbIhaQiKkZcAQTYvLg5GAAEEEEAgngIE2Xi2a9xrRZCNewtTPwQQQAABBHIQYJxsDkgcEjoBgmzomoQCIYAAAgggUHkBgmzlzblj8QIE2eINuQICCCCAAAKxEOCBr1g0Y6IqQZBNVHNTWQQQQAABBLILME42uw17wilAkA1nu1AqBBBAAAEEKi5AkK04OTcsUoAgWyQgpyOAAAIIIBAXAcbJxqUlk1MPgmxy2pqaIoAAAgggUKcAQbZOHnaGUIAgG8JGoUgIIIAAAghUS4AHvqolz30LESDIFqLGOQgggAACCMRUgHGyMW3YmFaLIBvThqVaCCCAAAIIFCJgg+yQ9oPksA6HFHIJzkGgYgIE2YpRcyMEEEAAAQTCL8A42fC3ESVcJ0CQXWfBGgIIIIAAAokXIMgm/isQKQCCbKSai8IigAACCCBQfgEe+Cq/MXcojQBBtjSOXAUBBBBAAIHYCNhxsiP6DpfujbvGpl5UJH4CBNn4tSk1QgABBBBAoCiB0XMelzFzxwoPfBXFyMkVECDIVgCZWyCAAAIIIBAlAcbJRqm1kl1Wgmyy25/aI4AAAgggUEvABlndMar/DbX2swGBsAgQZMPSEpQDAQQQQACBEAnYB74YJxuiRqEotQQIsrVI2IAAAggggAACPPDFdyAKAgTZKLQSZUQAAQQQQKDCAgTZCoNzu4IECLIFsXESAggggAAC8RawMxd0b9JVRvQZHu/KUrvIChBkI9t0FBwBBBBAAIHyCdgHvgiy5TPmysULEGSLN+QKCCCAAAIIxE6AIBu7Jo1lhQiysWxWKoUAAggggEDxAnbmAqbgKt6SK5RHgCBbHleuigACCCCAQOQFeOAr8k0Y+woQZGPfxFQQAQQQQACBwgQIsoW5cVblBAiylbPmTggggAACCERKwM5cMKT9IDmswyGRKjuFTYYAQTYZ7UwtEUAAAQQQyFuAB77yJuOECgsQZCsMzu0QQAABBBCIigBBNiotldxyEmST2/bUHAEEEEAAgUABZi4IJOKAKgoQZKuIz60RQAABBBAIuwBBNuwtlOzyEWST3f7UHgEEEEAAgToFmLmgTh52VlmAIFvlBuD2CCCAAAIIhFmAIBvm1qFsBFm+AwgggAACCCCQVYApuLLSsCMEAgTZEDQCRUAAAQQQQCCsAsxcENaWoVwqQJDle4AAAggggAACWQUIsllp2BECAYJsCBqBIiCAAAIIIBBmAWYuCHPrJLtsBNlktz+1RwABBBBAIFDABtkRfYdL98ZdA4/nAAQqJUCQrZQ090EAAQQQQCCiAsxcENGGS0CxCbIJaGSqiAACCCCAQDECBNli9Di3nAIE2XLqcm0EEEAAAQRiIMAUXDFoxJhWgSAb04alWggggAACCJRKgJkLSiXJdUotQJAttSjXQwABBBBAIGYCBNmYNWiMqkOQjVFjUhUEEEAAAQTKJWBnLhjV/4bAW+hQhMM6HBJ4HAcgUKwAQbZYQc5HAAEEEEAgAQK5PPClAXbGkg+Nhk7VxYJAuQUIsuUW5voIIIAAAgiEXECHDgTND1tXkLUPg9lq6rUIslaDn+UUIMiWU5drI4AAAgggEAEBO2xAA2i3xl2ke5OutYKtDatD2g/yhw3YbelV5MUJ6SJ8LpcAQbZcslwXAQQQQACBiAhkC6S2l3ZIh0Hy0ucT5eX5k2qCrhd4x8wdm7V2BNmsNOwosQBBtsSgXA4BBBBAAIGoCWQLsoXWI5cHwgq9Nuch4AoQZF0N1hFAAAEEEEiogB1eUGz1GR9brCDn5yNAkM1Hi2MRQAABBBCIqUCpemUJsjH9goS0WgTZkDYMxUIAAQQQQKDSAqXolWV8bKVbLdn3I8gmu/2pPQIIIIAAAr5AKXplCbI+JysVECDIVgCZWyCAAAIIIBAFgVIEWR70ikJLx6eMBNn4tCU1QQABBBBAoGiBYoYXMD62aH4ukKcAQTZPMA5HAAEEEEAgzgLF9Mq6L0uIsxF1C48AQTY8bUFJEEAAAQQQCIVAob2yjI8NRfMlqhAE2UQ1N5VFAAEEEEAgWKDQXlmCbLAtR5RWgCBbWk+uhgACCCCAQOQFCg2yPOgV+aaPXAUIspFrMgqMAAIIIIBA+QXyHV7Ag17lbxPuUFuAIFvbhC0IIIAAAggkXiDfXlke9Er8V6YqAATZqrBzUwQQQAABBMIvkE+vLONjw9+ecSwhQTaOrUqdEEAAAQQQKIFAPr2yBNkSgHOJvAUIsnmTcQICCCCAAALJEMgnyPKgVzK+E2GrJUE2bC1CeRBAAAEEEAiRQC7DC3jQK0QNlrCiEGQT1uBUFwEEEEAAgXwEcumV5UGvfEQ5tpQCBNlSanItBBBAAAEEYigQ1CvL+NgYNnpEqkSQjUhDUUwEEEAAAQSqJRDUK0uQrVbLcF+CLN8BBBBAAAEEEKhToK4gy/jYOunYWWYBgmyZgbk8AggggAACcRDINryAIBuH1o1uHQiy0W07So4AAggggEDFBLL1yvKgV8WagBtlECDIZkBhEwIIIIAAAgjUFsjUK8v42NpObKmcAEG2ctbcCQEEEEAAgUgLZOqV5UUIkW7SyBeeIBv5JqQCCCCAAAIIVEYgPcgyPrYy7twluwBBNrsNexBAAAEEEEAgTcAdXkCQTcPhY8UFCLIVJ+eGCCCAAAIIRFfA7ZXdo/Wucto2J0e3MpQ88gIE2cg3IRVAAAEEEECgsgK2V/a4LkNl/zYDKntz7oaAI0CQdTBYRQABBBBAAIFggV9P/L18+fUiYcaCYCuOKK8AQba8vlwdAQQQQACB2AkQZGPXpJGtEEE2sk1HwRFAAAEEEKiOwMgpV8v7iz+gR7Y6/NzVESDIOhisIoAAAggggECwgH3gi7d6BVtxRHkFCLLl9eXqCCCAAAIlFpj7xVfminMXLirxlblcrgIzln7o9cjOkO5Nukm3zbvkehrHlVGgfctm0r5F0zLeIZyXJsiGs10oFQIIIIDAWgENruOnz5S5C2sCLDAIIFC3QL9tO0m/Hsn4BYMgW/d3gb0IIIAAAlUSyBZgGzZsaErUoEGDKpWM2yIQPoGVK1fKihUrUgqWhEBLkE1pcj4ggAACCIRBYPy0D71e2Fl+UTS8tmrVSgivPgkrCGQUWLBggdluf+qHU/rtHNthBwTZjF8DNiKAAAIIVEvADbEE2Gq1AveNuoANsvZnXMMsQTbq3
1TKjwACCMRIwA2x2gOr/7EggEDhAhpk4xxmCbKFfzc4EwEEEECgxAKXPPiUuSIhtsSwXC7RAjbMtm/ZVE7ZZ+dYWRBkY9WcVAYBBBCIroDtjSXERrcNKXl4BaZOnWoKF7chBgTZ8H7nKBkCCCCQKAHbG9urV69E1ZvKIlAJgbj2yhJkK/Ht4R4IIIAAAnUK0BtbJw87ESiJQBx7ZQmyJflqcBEEEEAAgWIEbp/wmnnhQadOnZhiqxhIzkWgDoHZs2ebuWbjNL8sQbaOBmcXAggggEBlBBhWUBln7pJsAX1pwqxZsyROD30RZJP9nab2CCCAQCgECLI1zfDjDz/IjDcmivwo0rBpM2nTpXso2qeUhVi9Yrl8PGOafPrB+7Lsqy+lYeOm0m7b7aRz350Kvs2qZUvlo/feNee3bNdBmrbe0qx/v2aNfPjWa2Z98xYtZYuOyXhtazZIgmw2GbYjgAACCCBQoIC+ivb28a+JvvygY8eOBV4lHqet+eZr+dtPjzGV6dxnRxl6wcUpFfv266/llTEPyJadu0rXHXZN2ReFDxpib//DBbJk4fyU4u5x2NGy91E/SdmWz4c570yW+0ZeYk7Z9yfDZOeDDjPryxcvkn+de6pZ77lnPxl85nCznuR/7DjZS4YeGAsGemRj0YxUAgEEEIiuAEF2XdvVFWQXfjRX7r3iD6K9j4NO/4X02nvfdSdGZO3xf18j77w4rlZph5xzgWy72161tue6gSCbq5QIQTZ3K45EAAEEEEAgUIAgu45I/xR+7xV/lB9//EG27rat9DvmJH/n+6++LP+99krzOapB9uYRP5cvP/3Y1OGo838nnXrvIOutv75fx0JX5s+ZKc/+5xZz+k4HDJbuu+xh1umRrS1KkK1twhYEEEAAAQQKFiDI5kYXhyD7lxOGmMo2bb2VnPG363OreBFHEWRr4xFka5uwBQEEEEAAgYIFShlk9U/z36xaJQ2bNM1aHg03G2y4kWzSsFHWY9J3/PD99+bBJD2n/iabpu/O6fP3330neu/NmjYvqBcy7yD744+yYsliqbdePWmweZOcyugepONZtbe00Pq617LrNsi26bqNnPiHK+zmrD/XfPuNrFj8lay/wYbeQ2FN8narZJDVsn67elWgtY5zXr18mWzWrLnXNutlrXv6jhqLxd71G8tGG2+cvjvnzwTZnKk4EAEEEEAAgWCBQoLsG888LpOff8qErJP+OFLefO4JmT7xRfnkg/fMDRt5YbFz3x1lryOOM//j1+Aw4aG7ZebkN2S596S8LhttvInsc/QJsv2+B2UMSBpe33nxeXnjmbGi41Ptotdut21P2WfoCV4YaWE3+z9t2Tasv7Gccunf5O3xz8q7L0/wnqp/xz9mq87dZLdDj5Iu26e+9/67Nd/KHX/8tei92/foJfuddLosW/Sl3H/lJbLKq4OOj9Vl0802l00bbSaNW7Sq9UDYvOnvyEv/vV/0z+3ffr3aHK911aEKuw8ZKhoi05dXx/5Xpr7wvGzcoKEcfu6F8ugNV8u86TWvNNWHznY68FBp37N3+mk5fdYgqmN7dbHDCnS9+VZb6w/pscc+svuhQ826/jPj9YkydcJzssAzt21ld3bstb30O/pEadU+9aHAT2fOkCduvtYcpq499+hn1usKsg/9/TJZvOBzadSkmRx70Z/sLVJ+vvnsWHnruSfNtiPOu0iabdnGrGtbPn37v9duHyGvPfWYTPnf0+azzoyg45e3H3iQ+az/aLtNePAumTttasqDbtoWfQbsL9vtOcA/1l3RX8pee2qM9x16LsVCv4N6n72PPE5abN3OPSVwnSAbSMQBCCCAAAII5C5QSJAdd/8dMumxR8xNNDDYsJF+13bb9pJDzjjPBEE3RLnH7XnEsSbwutu0J1PDoxtg3f12ffBZw/3QZLe5Zdvt0CNl4qMP2121fmoo0+Bll0wPey36/FO56ddn20NSfjZu2VrO+ntNoNLwO/amf3qheXzKMekfdh10uBfgT0wJ78/ffau89uSj5lANSJ/P/jDltGLG5C5eMF9u/NUZKddzP2j7HXDKmaI9jo9cc4XMnvqWuzvjevrDYYU87HXD+WeYUKm/FJx3/Z0Z7zP+gTv99hv256ukdYfO5rgP3pwkD1/9F7O+dbceZjox9wLb7dXf+9790mya9fabMvrav/q/VLjH2XUd03vQT8+RjTdtYDeJ/lJz3xWX1Lq2f8DalYNPO0d699s/fXPWzwTZrDTsQAABBBBAIH+BYoOsvWOvvQdIW28+0pVLl8i4e++wm/2f2gO4w36DZIONNpT3X31FNGDYRYOMBhpddC7X+6/8k8x5d4r5rL2Zex5+jOkFXLVsmbz70riUc7XX1Z2f1A2y5gLeP30GHGB6RFctW+L1HL+UEhR/eeNd/jCHTEFWt814Y5LpbdZeaF20ru169DY90rZXV6flmvDg3Wa//rPLwUM8j16mPnO9umjPsl32/cmp3hRVNeNVdZsbZO0xGpK1J1F7dc+/6V6pv2lhQyo0oGovqy6PeT29umiPYr9jTjTrzbbYyvjpLyNP336j2aZtob3Hrdt1lHr16smi+Z+ZX1y+mv+p2a9lO+Ov1/thvJpB1hTI+0e/J1punVrsuN9c6vWo9zbrGpjtog+39e430OtN31wWfjzX9JzbXva++x4oBw47yx7q1fdhGXd/TcDW79euhxxu/gLwxScfmblxP3zzVf/YC257QDbcqL7/ua4VgmxdOuxDAAEEEEAgT4FSBNnDf3GhdN+55kl1vf17k16U0f/6m1+SDj37yNBf/V7W33BDs017L3V2APvn/hN+f7ls3b2H2Tf5+SflqVE1gUoD009+938pQwh+9MaevvTIvSaE6Amt23eSk//0Vz9UpQdZPb/tNtuZa+s/Gkzv+vNvZf7cWWbboNPP9YLpQLOeKciaHd4/dY2R1Z7jW397nj3UK/Nl3j17+p91RYP7A3+91N/283/eagKlbkgPsjqjgAZkdVr65UJp0moL/7xiVrKNkVXTG391pv9n99Ov/Jf3Z/yaoQf2fhqI/33B2f6f2H92xbXSvE1bs7vaQXbXwUdIv6EnmjGvOlxBh3zo+NeHvR7mD/QFF95i5so98njxkrn5rP/YYRf2rwWn/t/V/rCJey6/2B/ecfY1N8vmzVv65+mKuz+9hzrlwLQPBNk0ED4igAACCCBQjECxQVZ7Jwedvi7EaVncQKiff3r5P6Rl2/a66i9Txj0tT95a8+S8O0RAe2Ptn7eP/OVF0nXH3fxz7IoGvJtHnCu2h9ANjm6QzVQ2vYYblvc+6ngv5NS8BMEtd/oLEeoKspMef0TG3VfTC73TgYNl4Amn2aKm/Hzy1utkyrhnzDb3vm6Q7bH7PnLo2eennFeqD9mCrI6FffqOm2TRp594PdfbyME/OzfjLbXH1g4jOemSK0XHGutSzSCrvctnXXWj/0uSLfiizz6Rmy48x3zUvwbodzDTVGNu2Xv3208OPu3n5hz9xcQObXFDu73+F5/Mk1nemG/9ZUt7bPXNZbksBNlclDgGAQQQ
QACBHAWKDbLZeqNsaNJiXPSf0eZP1G6RZrz+ijzyj5Fm00E/PVv69D/A9ECOPPkIs82MnbzOC4dOD5p7vj4I9IwXvnSx5+u6G2SzjS11e0fdt1oVGmQf/NufZeaUN/T2crr3J3f9c32mxe25dQOrG2T3P/l0MwQj0/nFbrNtkuusBfZ+33izAXw+60N55dGH/F7KEy/+i7TxHmDTxQ2Dub7Zq1RjZPUXHf2FJ33RP/0/dPXlZvPu3jhoHZecadG5g68cVjNOWoO5BnRd3O+RftaxxPqwW9vuPQse5qHXIciqAgsCCCCAAAIlEig2yLq9oW6RrjrtWDO+M9ucpR++9Zrok+u62CCqf+LVSft10eEAOiwg2+KGJx1vquNOdXEDyPG//bM3w0GvWpfQh6n0Va26uCGn0CBr66rXu3DUQ7V6B3W7LjreVY/VRXuotZdQFzfIHv3rP5gXFZgdJf4nlyCrD4fNnTZFvvh4nnz1+Wfe7AVz/Nka3OKccLE3HMR70EoXty0qHWR1WEH/Y052i2bWX31itPzvnlG1tte1QcfZ/uqW+8whX3njgv9z6UUZ667fza477iLddtw1ZdhLXde2+wiyVoKfCCCAAAIIlECg2CDrPk3uFseGO/2z7s9G/svdZdbdHjMbZDU83fKbX5j93XbaXY44b0St8+yGBXNny22/H24+ur2bbpAd9n9/N2No7Tn25+fe1Fi3X/wr87HUQfY3d42xt8n404ZJNzS5QTbTMIyMFypgo713ph7Zr1etlKe8oR7vvfpSTlcOS5DN1oP9zB3/ljeffSKnurgHaZDVttFFpw8b++9/+g8eusfZ9QHHneI92HdY1r8c2OPsT4KsleAnAggggAACJRAoOshmCYuFBFl33lF9QCzb/KJabbcXcIf9Dpb9T655Or0aQdb+mVzLNeKORzKOxdR9bo+v21PtBtnTrvintGjTTg8v+ZItyOqf1+/zxibbh+/0xmbspzfdVYut23oPQHWSrbtu6z1gd583TVhNUK9EkHVd3F+Y3Om3Dhx2pvT15iJOX17UBwIfqeld1RkJ2mfomU8/Rz939XpZ08fS6gN3s72H9WZNnSzubAX2fJ0BYrfB66Zxs9sz/STIZlJhGwIIIIAAAgUKhCnI6tPzV5zo9W55iz7Eo0/2Z1veHv+MPHHLdWb3Xt7E9HseXvMn+2oEWR0ioUMldDnnH7dk/XOz+wCS/llew6AubmCrRpB1xwxrb+TRv77YHzZgCrj2H511wU6b5s4G4f5Ske/QArdn2r2Xrut0YXZe3nyDrJ5npxtLny84/T51fdYHC91gq6H/kw/f9+a3fcjvqXWHidR1Ld1HkA0SYj8CCCCAAAJ5CIQpyGqx3afF3QeKUqrkBd7bvTdw2RcH2Omq9JjKBNl1U3bpPf93z23y6hM1PZX6tjL3TVm63y7jvXlJJ3rzk+qi88wOOL5mXG+1g6z74oEBxw+r+VO5LfTanxrorj13mD9m1M7VqrsLCbI6FtpOe5VpHla93/XDT/en+8o3yOrbxu685EJTeg2aw/7895RAurZaZnqzWy76hZmDtmXbDubBMZ3Ca+xN18qCebO9h7t61Hp7m56rb6u75qx1D5BlqoO9h/uTIOtqsI4AAggggECRAmELsu5sBPoUuQYmfd2su+hLER678Rp/069ve9B70cJG5nO5gqz7cFr/Y0/2JsivmV1Bb+qO19UexlMvu8ab+7W1Xz5dWfTZx950UDUPsunnE71X+7bp0l1Xq94j6wbx9BcDaPm0p/zJW/4lb3uvrrWL+1BaIUHWnWYtU/hPH+Oab5BNn6Kt/3Femw1a12a2XmO8+Y7tuGA7RMUdAqLH6TzFW3bqaqtufn69coVcd95p5gG+bOPAU05Y+4Egm0mFbQgggAACCBQoELYgqwFEe2Vtb52OJd33hFOlVbsOsmrpUpn+6ov+63G1ykMvuFh0zle7lCvI6vjRuy/7vbmNTg2280GHysYNGkrfAQeabe4cqxpmB554munN053zpk2VJ2+rmTNXP7sPmOnnavfITp/4goy57iotiln0lbX6Fix9m5j2bOovDtMnvmh3m5/u3L+FBFkNxU/cfK1/zb28VxXrSzFWL1/uvUltYq375Rtk9cLzpk81Ly6wN9F5hbcfeLA02LyxfPT+NJn28gR/zmI9xp067bEbvWENL403p+owlz799zPTb+nLFvTtXjpvsP2LgJZ9zyOOM8cG/UOQDRJiPwIIIIAAAnkIhC3IatG1h/NBb9ypTtRf13LU8N9Klx12STmkXEE2/U/J9qYX3Hq/6TFevWK5mRfXfWDKHuP+1Fed9j/2FHdT1YPs9999Z6aassEspXDOB7W2Dzu5PbeFBFnt9bzlN+f5bxNzbuOvavCc+sL/zOdCgqye+PLo++WFh+7xr5ltJf0XIu1xHeXNbKGvvK1r0V+iBnsvsNh40wZ1HebvI8j6FKwggAACCCBQvEAhQfaFh+72AsID5ubZpouysxZkexBGXyCgLxLQRd8k1XufmtfEmg3ePzrnqo4pzTSFkg450Ae8OmzX1x7u/8ylbO5QgD0PP8a7lvfqUm/R17D+7dSjzbqGNg3K7jL33bdlzPVX+eNEdZ9bf/0T/FvPPWFen7tq2VL3VPPwmobYHfcbVGuqpnH33i6Txv7XHH/6ldd5r4dtk3JuqT7YWQvcB83stVcuXSITHrxL3h7/rN3k/9TX5eo8vZs1ayFXnnKk2W4e0rr5XlOXudPelnv/8gezXXuidzpgsFlfsWSxXPvzU8y6tm/6G8O+WbVKnrnz337PpznQ+0e/M/t644frrVfP71F1Xx87c/Lr8uBVNXMMH3zaOdK73/721Iw/dbq1p73XHmcK6lo3/Q609mZoSF/0FbYvj3nAf5uZu197aXv3Gyh7DDk649hb91h3nSDrarCOAAIIIIBAkQKFBNkib5nf6V44XLroC1k8/3PZoH597533LaRRk2b5XaOUR3vlWfLFAu+FY+vJJo0a+XOOpt9Ce3C/9MbF/vD9D14AbC76J+l6662XfljoPmtP5NIvFsrKZUs865beWN8t8gpqhVRIh5PoUJLVK5Z5Q0g6miEbhVwn6BydceCrBZ95Pf2LpGHjJiaY6/CQoEV7rPUBMJ2GayNvvHbzNm1lk4aNgk7LuJ8gm5GFjQgggAACCBQmEPogW1i1OAuBUAoQZEPZLBQKAQQQQCCqAgTZqLYc5Y6iAEE2iq1GmRFAAAEEQitAkA1t05iC6YNUn3oT8Be81Ktn5rV1J/Uv+FqcWLQAQbZoQi6AAAIIIIDAOgGC7DqLMK49f/dt/mthCy1frpP1F3p9zstdgCCbuxVHIoAAAgggkJPAJQ8+ZY7r1atXTsdzUOUE9JWwH3tznha61PN6ZHVWBnpkCxUs3XkrV66UWbNmSfuWTeWUfXYu3YWreKV63lQdP1bx/twaAQQQQAABIcjyJUCg/AILFiwQ/a/ftp2kX48u5b9hBe5AkK0AMrdAAAEEEKhb4PYJr8nchV9Jp06dpEGD3CZ2r/uK7EUAgXQBgmy
6CJ8RQAABBBAogYANsg0bNpSOHTuW4IpcAgEE0gXs+Fh6ZNNl+IwAAggggEARAvaBL70EvbJFQHIqAlkEbG+s7r5k6IFZjoreZoYWRK/NKDECCCAQSwF6ZWPZrFQqJAJx7I1VWoJsSL5gFAMBBBBIugC9skn/BlD/cgnEtTdWvQiy5frWcF0EEEAAgbwFxk/7UMZPn2XOY4hB3nycgEAtATfEntJvZ2nfommtY6K8gSAb5daj7AgggEAMBdww26pVK9H/WBBAIH8BN8TG6QEvV4Ig62qwjgACCCAQCgE3zGqBCLShaBYKEREBN8BqkeMaYrVuBFlVYEEAAQQQCJ2Ahlld7FADXdfpuZhnViVYEEgV0Ld26bJixQp/h77Bq9+2nWM3nMCvoLdCkHU1WEcAAQQQCJ1ApkAbukJSIARCJJCEAGu5CbJWgp8IIIAAAqEX0JkN5i5cFPpyUkAEKi3QvmUzc8u4PcwV5EiQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBQiyoWwWCoUAAggggAACCCAQJECQDRJiPwIIIIAAAggggEAoBf4f394ZCGel78cAAAAASUVORK5CYII=)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -778,7 +551,15 @@ "name": "python3" }, "language_info": { - "name": "python" + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/docs/docs/getting_started/concepts.md b/docs/docs/getting_started/concepts.md index 09a2375c5a342..cfca32f608ef8 100644 --- a/docs/docs/getting_started/concepts.md +++ b/docs/docs/getting_started/concepts.md @@ -6,7 +6,7 @@ This is a quick guide to the high-level concepts you'll encounter frequently whe There are endless use cases for data-backed LLM applications but they can be roughly grouped into four categories: -[**Structured Data Extraction**](../use_cases/extraction/) +[**Structured Data Extraction**](../use_cases/extraction.md/) Pydantic extractors allow you to specify a precise data structure to extract from your data and use LLMs to fill in the missing pieces in a type-safe way. This is useful for extracting structured data from unstructured sources like PDFs, websites, and more, and is key to automating workflows. [**Query Engines**](../module_guides/deploying/query_engine/index.md): diff --git a/docs/docs/index.md b/docs/docs/index.md index 78c7cf2cdd29e..e76155f250bc6 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -2,7 +2,7 @@ # Welcome to LlamaIndex 🦙 ! 
-LlamaIndex is a framework for building context-augmented generative AI applications with [LLMs](https://en.wikipedia.org/wiki/Large_language_model) including [agents](./understanding/agent/basic_agent/) and workflows. +LlamaIndex is a framework for building context-augmented generative AI applications with [LLMs](https://en.wikipedia.org/wiki/Large_language_model) including [agents](./understanding/agent/basic_agent/) and [workflows](./understanding/workflows/).
@@ -44,13 +44,13 @@ The most popular example of context-augmentation is [Retrieval-Augmented Generat ### What are agents? -Agents are LLM-powered knowledge assistants that use tools to perform tasks like research, data extraction, and more. Agents range from simple question-answering to being able to sense, decide and take actions in order to complete tasks. +[Agents](./understanding/agent/basic_agent/) are LLM-powered knowledge assistants that use tools to perform tasks like research, data extraction, and more. Agents range from simple question-answering to being able to sense, decide and take actions in order to complete tasks. LlamaIndex provides a framework for building agents including the ability to use RAG pipelines as one of many tools to complete a task. ### What are workflows? -Workflows are multi-step processes that combine one or more agents, data connectors, and other tools to complete a task. They are event-driven software that allows you to combine RAG data sources and multiple agents to create a complex application that can perform a wide variety of tasks with reflection, error-correction, and other hallmarks of advanced LLM applications. +[Workflows](./understanding/workflows/) are multi-step processes that combine one or more agents, data connectors, and other tools to complete a task. They are event-driven software that allows you to combine RAG data sources and multiple agents to create a complex application that can perform a wide variety of tasks with reflection, error-correction, and other hallmarks of advanced LLM applications. ### LlamaIndex is the framework for Context-Augmented LLM Applications diff --git a/docs/docs/indices/create_and_refine.png b/docs/docs/indices/create_and_refine.png deleted file mode 100644 index a7d2507617a01..0000000000000 Binary files a/docs/docs/indices/create_and_refine.png and /dev/null differ diff --git a/docs/docs/indices/keyword.png b/docs/docs/indices/keyword.png deleted file mode 100644 index ab6641867a076..0000000000000 Binary files a/docs/docs/indices/keyword.png and /dev/null differ diff --git a/docs/docs/indices/keyword_query.png b/docs/docs/indices/keyword_query.png deleted file mode 100644 index 5595ce4b98a4d..0000000000000 Binary files a/docs/docs/indices/keyword_query.png and /dev/null differ diff --git a/docs/docs/indices/list.png b/docs/docs/indices/list.png deleted file mode 100644 index 595a93936fff3..0000000000000 Binary files a/docs/docs/indices/list.png and /dev/null differ diff --git a/docs/docs/indices/list_filter_query.png b/docs/docs/indices/list_filter_query.png deleted file mode 100644 index 53bc6067a78b6..0000000000000 Binary files a/docs/docs/indices/list_filter_query.png and /dev/null differ diff --git a/docs/docs/indices/list_query.png b/docs/docs/indices/list_query.png deleted file mode 100644 index e94ac5a354cd6..0000000000000 Binary files a/docs/docs/indices/list_query.png and /dev/null differ diff --git a/docs/docs/indices/tree.png b/docs/docs/indices/tree.png deleted file mode 100644 index 3385fa7af5e10..0000000000000 Binary files a/docs/docs/indices/tree.png and /dev/null differ diff --git a/docs/docs/indices/tree_query.png b/docs/docs/indices/tree_query.png deleted file mode 100644 index 4f3488e13d2e5..0000000000000 Binary files a/docs/docs/indices/tree_query.png and /dev/null differ diff --git a/docs/docs/indices/tree_summarize.png b/docs/docs/indices/tree_summarize.png deleted file mode 100644 index b93290571aa39..0000000000000 Binary files a/docs/docs/indices/tree_summarize.png and /dev/null 
differ diff --git a/docs/docs/indices/vector_store.png b/docs/docs/indices/vector_store.png deleted file mode 100644 index 37aa5de2a14b6..0000000000000 Binary files a/docs/docs/indices/vector_store.png and /dev/null differ diff --git a/docs/docs/indices/vector_store_query.png b/docs/docs/indices/vector_store_query.png deleted file mode 100644 index de2c1948cdcaf..0000000000000 Binary files a/docs/docs/indices/vector_store_query.png and /dev/null differ diff --git a/docs/docs/integrations/TonicValidate-Graph.png b/docs/docs/integrations/TonicValidate-Graph.png deleted file mode 100644 index c2328da776d78..0000000000000 Binary files a/docs/docs/integrations/TonicValidate-Graph.png and /dev/null differ diff --git a/docs/docs/integrations/arize_phoenix.png b/docs/docs/integrations/arize_phoenix.png deleted file mode 100644 index 77d61a15a456a..0000000000000 Binary files a/docs/docs/integrations/arize_phoenix.png and /dev/null differ diff --git a/docs/docs/integrations/honeyhive.png b/docs/docs/integrations/honeyhive.png deleted file mode 100644 index 259564acdfad3..0000000000000 Binary files a/docs/docs/integrations/honeyhive.png and /dev/null differ diff --git a/docs/docs/integrations/openllmetry.png b/docs/docs/integrations/openllmetry.png deleted file mode 100644 index a1308ef9b0760..0000000000000 Binary files a/docs/docs/integrations/openllmetry.png and /dev/null differ diff --git a/docs/docs/integrations/perfetto.png b/docs/docs/integrations/perfetto.png deleted file mode 100644 index a3d77afd1425d..0000000000000 Binary files a/docs/docs/integrations/perfetto.png and /dev/null differ diff --git a/docs/docs/integrations/trulens.png b/docs/docs/integrations/trulens.png deleted file mode 100644 index a0630e7bcdbf9..0000000000000 Binary files a/docs/docs/integrations/trulens.png and /dev/null differ diff --git a/docs/docs/integrations/wandb.png b/docs/docs/integrations/wandb.png deleted file mode 100644 index 86174b887a7ee..0000000000000 Binary files a/docs/docs/integrations/wandb.png and /dev/null differ diff --git a/docs/docs/module_guides/deploying/agents/tools.md b/docs/docs/module_guides/deploying/agents/tools.md index b8c72c8454b2f..a9948eff4343e 100644 --- a/docs/docs/module_guides/deploying/agents/tools.md +++ b/docs/docs/module_guides/deploying/agents/tools.md @@ -13,7 +13,7 @@ We offer a few different types of Tools: - `FunctionTool`: A function tool allows users to easily convert any user-defined function into a Tool. It can also auto-infer the function schema. - `QueryEngineTool`: A tool that wraps an existing [query engine](../query_engine/index.md). Note: since our agent abstractions inherit from `BaseQueryEngine`, these tools can also wrap other agents. 
- Community contributed `ToolSpecs` that define one or more tools around a single service (like Gmail) -- Utiltiy tools for wrapping other tools to handle returning large amounts of data from a tool +- Utility tools for wrapping other tools to handle returning large amounts of data from a tool ## FunctionTool diff --git a/docs/docs/module_guides/indexing/lpg_index_guide.md b/docs/docs/module_guides/indexing/lpg_index_guide.md index 505edb5b7bfd9..18ecfcf5afbbd 100644 --- a/docs/docs/module_guides/indexing/lpg_index_guide.md +++ b/docs/docs/module_guides/indexing/lpg_index_guide.md @@ -339,7 +339,7 @@ To illustrate how this works, here is a small example: ```python # NOTE: current v1 is needed -from pydantic.v1 import BaseModel, Field +from pydantic import BaseModel, Field from llama_index.core.indices.property_graph import CypherTemplateRetriever # write a query with template params @@ -369,12 +369,13 @@ template_retriever = CypherTemplateRetriever( Currently, supported graph stores for property graphs include: -| | In-Memory | Native Embedding Support | Async | Server or disk based? | -|---------------------|-----------|--------------------------|-------|-----------------------| -| SimplePropertyGraphStore | ✅ | ❌ | ❌ | Disk | -| Neo4jPropertyGraphStore | ❌ | ✅ | ❌ | Server | -| NebulaPropertyGraphStore | ❌ | ❌ | ❌ | Server | -| TiDBPropertyGraphStore | ❌ | ✅ | ❌ | Server | +| | In-Memory | Native Embedding Support | Async | Server or disk based? | +|------------------------------|------------|--------------------------|-------|-----------------------| +| SimplePropertyGraphStore | ✅ | ❌ | ❌ | Disk | +| Neo4jPropertyGraphStore | ❌ | ✅ | ❌ | Server | +| NebulaPropertyGraphStore | ❌ | ❌ | ❌ | Server | +| TiDBPropertyGraphStore | ❌ | ✅ | ❌ | Server | +| FalkorDBPropertyGraphStore | ❌ | ✅ | ❌ | Server | ### Saving to/from disk diff --git a/docs/docs/module_guides/models/embeddings.md b/docs/docs/module_guides/models/embeddings.md index 3eaef138dde93..fab8dc24ebe85 100644 --- a/docs/docs/module_guides/models/embeddings.md +++ b/docs/docs/module_guides/models/embeddings.md @@ -167,9 +167,9 @@ class InstructorEmbeddings(BaseEmbedding): instruction: str = "Represent the Computer Science documentation or question:", **kwargs: Any, ) -> None: + super().__init__(**kwargs) self._model = INSTRUCTOR(instructor_model_name) self._instruction = instruction - super().__init__(**kwargs) def _get_query_embedding(self, query: str) -> List[float]: embeddings = self._model.encode([[self._instruction, query]]) diff --git a/docs/docs/module_guides/observability/index.md b/docs/docs/module_guides/observability/index.md index e633cb0541135..37658a1bec70a 100644 --- a/docs/docs/module_guides/observability/index.md +++ b/docs/docs/module_guides/observability/index.md @@ -164,6 +164,64 @@ set_global_handler("literalai") ![](../../_static/integrations/literal_ai.gif) +### Comet Opik + +[Opik](https://www.comet.com/docs/opik/?utm_source=llama-index&utm_medium=docs&utm_campaign=opik&utm_content=home_page) is an open-source end to end LLM Evaluation Platform built by Comet. + +To get started, simply sign up for an account on [Comet](https://www.comet.com/signup?from=llm&utm_medium=github&utm_source=llama-index&utm_campaign=opik) and grab your API key. + +#### Usage Pattern + +- Install the Opik Python SDK with `pip install opik` +- In Opik, get your API key from the user menu. +- If you are using a self-hosted instance of Opik, also make note of its base URL. 
+ +You can configure Opik using the environment variables `OPIK_API_KEY`, `OPIK_WORKSPACE` and `OPIK_URL_OVERRIDE` if you are using a [self-hosted instance](https://www.comet.com/docs/opik/self-host/self_hosting_opik). You can set these by calling: + +```bash +export OPIK_API_KEY="" +export OPIK_WORKSPACE="" + +# Optional +#export OPIK_URL_OVERRIDE="" +``` + +You can now use the Opik integration with LlamaIndex by setting the global handler: + +```python +from llama_index.core import Document, VectorStoreIndex, set_global_handler + +# You should provide your OPIK API key and Workspace using the following environment variables: +# OPIK_API_KEY, OPIK_WORKSPACE +set_global_handler( + "opik", +) + +# This example uses OpenAI by default so don't forget to set an OPENAI_API_KEY +index = VectorStoreIndex.from_documents([Document.example()]) +query_engine = index.as_query_engine() + +questions = [ + "Tell me about LLMs", + "How do you fine-tune a neural network ?", + "What is RAG ?", +] + +for question in questions: + print(f"> \033[92m{question}\033[0m") + response = query_engine.query(question) + print(response) +``` + +You will see the following traces in Opik: + +![Opik integration with LlamaIndex](../../_static/integrations/opik.png) + +#### Example Guides + +- [Llama-index + Opik documentation page](https://www.comet.com/docs/opik/tracing/integrations/llama_index?utm_source=llamaindex&utm_medium=docs&utm_campaign=opik) +- [Llama-index integration cookbook](https://www.comet.com/docs/opik/cookbook/llama-index?utm_source=llama-index&utm_medium=docs&utm_campaign=opik) + ## Other Partner `One-Click` Integrations (Legacy Modules) These partner integrations use our legacy `CallbackManager` or third-party calls. diff --git a/docs/docs/module_guides/querying/node_postprocessors/node_postprocessors.md b/docs/docs/module_guides/querying/node_postprocessors/node_postprocessors.md index 42918eb6e978a..9503f638ef26f 100644 --- a/docs/docs/module_guides/querying/node_postprocessors/node_postprocessors.md +++ b/docs/docs/module_guides/querying/node_postprocessors/node_postprocessors.md @@ -300,10 +300,11 @@ Full notebook guide is available [here](../../../examples/node_postprocessor/Col Uses models from [rankLLM](https://github.com/castorini/rank_llm) to rerank documents. Returns the top N ranked nodes. ```python -from llama_index.postprocessor import RankLLMRerank +from llama_index.postprocessor.rankllm_rerank import RankLLMRerank -postprocessor = RankLLMRerank(top_n=5, model="zephyr") -postprocessor.postprocess_nodes(nodes) +# RankZephyr reranker, return top 5 candidates +reranker = RankLLMRerank(model="rank_zephyr", top_n=5) +reranker.postprocess_nodes(nodes) ``` A full [notebook example is available](../../../examples/node_postprocessor/rankLLM.ipynb). diff --git a/docs/docs/module_guides/querying/structured_outputs/index.md b/docs/docs/module_guides/querying/structured_outputs/index.md index d231caae60a21..6e44510b9f86d 100644 --- a/docs/docs/module_guides/querying/structured_outputs/index.md +++ b/docs/docs/module_guides/querying/structured_outputs/index.md @@ -26,7 +26,7 @@ append format instructions to the prompt. After the LLM call, the output parser With function calling APIs, the output is inherently in a structured format, and the input can take in the signature of the desired object. The structured output just needs to be cast in the right object format (e.g. Pydantic). 
## Starter Guide -- [Simple Guide to Structured Outputs](../examples/structured_outputs/structured_outputs.ipynb) +- [Simple Guide to Structured Outputs](../../../examples/structured_outputs/structured_outputs.ipynb) ## Other Resources diff --git a/docs/docs/module_guides/storing/chat_stores.md b/docs/docs/module_guides/storing/chat_stores.md index 9e3c493bbf5db..22c3b94716918 100644 --- a/docs/docs/module_guides/storing/chat_stores.md +++ b/docs/docs/module_guides/storing/chat_stores.md @@ -97,3 +97,67 @@ chat_engine = SimpleChatEngine( response = chat_engine.chat("Hello.") ``` + +## DynamoDBChatStore + +Using `DynamoDBChatStore`, you can store your chat history in AWS DynamoDB. + +### Installation +```bash +pip install llama-index-storage-chat-store-dynamodb +``` + +### Usage +Ensure you have a DynamoDB table created with the appropriate schema. By default, here is an example: +```python +import boto3 + +# Get the service resource. +dynamodb = boto3.resource("dynamodb") + +# Create the DynamoDB table. +table = dynamodb.create_table( + TableName="EXAMPLE_TABLE", + KeySchema=[{"AttributeName": "SessionId", "KeyType": "HASH"}], + AttributeDefinitions=[ + {"AttributeName": "SessionId", "AttributeType": "S"} + ], + BillingMode="PAY_PER_REQUEST", +) +``` + +You can then use the `DynamoDBChatStore` class to persist and retrieve chat histories: +```python +import os +from llama_index.core.llms import ChatMessage, MessageRole +from llama_index.storage.chat_store.dynamodb.base import DynamoDBChatStore + +# Initialize DynamoDB chat store +chat_store = DynamoDBChatStore( + table_name="EXAMPLE_TABLE", profile_name=os.getenv("AWS_PROFILE") +) + +# A chat history, which doesn't exist yet, returns an empty array. +print(chat_store.get_messages("123")) +# >>> [] + +# Initializing a chat history with a key of "SessionID = 123" +messages = [ + ChatMessage(role=MessageRole.USER, content="Who are you?"), + ChatMessage( + role=MessageRole.ASSISTANT, content="I am your helpful AI assistant." + ), +] +chat_store.set_messages(key="123", messages=messages) +print(chat_store.get_messages("123")) +# >>> [ChatMessage(role=, content='Who are you?', additional_kwargs={}), +# ChatMessage(role=, content='I am your helpful AI assistant.', additional_kwargs={})]] + +# Appending a message to an existing chat history +message = ChatMessage(role=MessageRole.USER, content="What can you do?") +chat_store.add_message(key="123", message=message) +print(chat_store.get_messages("123")) +# >>> [ChatMessage(role=, content='Who are you?', additional_kwargs={}), +# ChatMessage(role=, content='I am your helpful AI assistant.', additional_kwargs={})], +# ChatMessage(role=, content='What can you do?', additional_kwargs={})] +``` diff --git a/docs/docs/module_guides/workflow/index.md b/docs/docs/module_guides/workflow/index.md index 160f1a969eed1..cb44643d397fa 100644 --- a/docs/docs/module_guides/workflow/index.md +++ b/docs/docs/module_guides/workflow/index.md @@ -187,14 +187,14 @@ from llama_index.core.workflow import Context @step async def query(self, ctx: Context, ev: MyEvent) -> StopEvent: # retrieve from context - query = ctx.data.get("query") + query = await ctx.get("query") # do something with context and event val = ... result = ... 
# store in context - ctx.data["key"] = val + await ctx.set("key", val) return StopEvent(result=result) ``` @@ -234,7 +234,7 @@ Using `ctx.collect_events()` we can buffer and wait for ALL expected events to a ## Manually Triggering Events -Normally, events are triggered by returning another event during a step. However, events can also be manually dispatched using the `self.send_event(event)` method within a workflow. +Normally, events are triggered by returning another event during a step. However, events can also be manually dispatched using the `ctx.send_event(event)` method within a workflow. Here is a short toy example showing how this would be used: @@ -256,9 +256,11 @@ class GatherEvent(Event): class MyWorkflow(Workflow): @step - async def dispatch_step(self, ev: StartEvent) -> MyEvent | GatherEvent: - self.send_event(MyEvent()) - self.send_event(MyEvent()) + async def dispatch_step( + self, ctx: Context, ev: StartEvent + ) -> MyEvent | GatherEvent: + ctx.send_event(MyEvent()) + ctx.send_event(MyEvent()) return GatherEvent() @@ -278,6 +280,63 @@ class MyWorkflow(Workflow): return StopEvent(result=events) ``` +## Retry steps execution in case of failures + +A step that fails its execution might result in the failure of the entire workflow, but oftentimes errors are +expected and the execution can be safely retried. Think of a HTTP request that times out because of a transient +congestion of the network, or an external API call that hits a rate limiter. + +For all those situation where you want the step to try again, you can use a "Retry Policy". A retry policy is an object +that instructs the workflow to execute a step multiple times, dictating how much time has to pass before a new attempt. +Policies take into consideration how much time passed since the first failure, how many consecutive failures happened +and which was the last error occurred. + +To set a policy for a specific step, all you have to do is passing a policy object to the `@step` decorator: + + +```python +from llama_index.core.workflow.retry_policy import ConstantDelayRetryPolicy + + +class MyWorkflow(Workflow): + # ...more workflow definition... + + # This policy will retry this step on failure every 5 seconds for at most 10 times + @step(retry_policy=ConstantDelayRetryPolicy(delay=5, maximum_attempts=10)) + async def flaky_step(self, ctx: Context, ev: StartEvent) -> StopEvent: + result = flaky_call() # this might raise + return StopEvent(result=result) +``` + +You can see the [API docs](../../api_reference/workflow/retry_policy/) for a detailed description of the policies +available in the framework. If you can't find a policy that's suitable for your use case, you can easily write a +custom one. The only requirement for custom policies is to write a Python class that respects the `RetryPolicy` +protocol. In other words, your custom policy class must have a method with the following signature: + +```python +def next( + self, elapsed_time: float, attempts: int, error: Exception +) -> Optional[float]: + ... 
+``` + +For example, this is a retry policy that's excited about the weekend and only retries a step if it's Friday: + +```python +from datetime import datetime + + +class RetryOnFridayPolicy: + def next( + self, elapsed_time: float, attempts: int, error: Exception + ) -> Optional[float]: + if datetime.today().strftime("%A") == "Friday": + # retry in 5 seconds + return 5 + # tell the workflow we don't want to retry + return None +``` + ## Stepwise Execution Workflows have built-in utilities for stepwise execution, allowing you to control execution and debug state as things progress. @@ -346,7 +405,19 @@ async def critique_joke(ev: JokeEvent) -> StopEvent: You can find many useful examples of using workflows in the notebooks below: -- [RAG + Reranking](../../examples/workflow/rag.ipynb) -- [Reliable Structured Generation](../../examples/workflow/reflection.ipynb) +- [Advanced Text-to-SQL](../../examples/workflow/advanced_text_to_sql.ipynb) +- [Citation Query Engine](../../examples/workflow/citation_query_engine.ipynb) +- [Common Workflow Patterns](../../examples/workflow/workflows_cookbook.ipynb) +- [Corrective RAG](../../examples/workflow/corrective_rag_pack.ipynb) - [Function Calling Agent](../../examples/workflow/function_calling_agent.ipynb) +- [JSON Query Engine](../../examples/workflow/JSONalyze_query_engine.ipynb) +- [Long RAG](../../examples/workflow/long_rag_pack.ipynb) +- [Multi-Step Query Engine](../../examples/workflow/multi_step_query_engine.ipynb) +- [Multi-Strategy Workflow](../../examples/workflow/multi_strategy_workflow.ipynb) +- [RAG + Reranking](../../examples/workflow/rag.ipynb) - [ReAct Agent](../../examples/workflow/react_agent.ipynb) +- [Reliable Structured Generation](../../examples/workflow/reflection.ipynb) +- [Router Query Engine](../../examples/workflow/router_query_engine.ipynb) +- [Self Discover Workflow](../../examples/workflow/self_discover_workflow.ipynb) +- [Sub-Question Query Engine](../../examples/workflow/sub_question_query_engine.ipynb) +- [Utilizing Concurrency](../../examples/workflow/parallel_execution.ipynb) diff --git a/docs/docs/node_postprocessors/prev_next.png b/docs/docs/node_postprocessors/prev_next.png deleted file mode 100644 index 2249cdb2943da..0000000000000 Binary files a/docs/docs/node_postprocessors/prev_next.png and /dev/null differ diff --git a/docs/docs/node_postprocessors/recency.png b/docs/docs/node_postprocessors/recency.png deleted file mode 100644 index c46c975b5c90b..0000000000000 Binary files a/docs/docs/node_postprocessors/recency.png and /dev/null differ diff --git a/docs/docs/optimizing/evaluation/component_wise_evaluation.md b/docs/docs/optimizing/evaluation/component_wise_evaluation.md index eec96ee9d37bf..be2219a66346d 100644 --- a/docs/docs/optimizing/evaluation/component_wise_evaluation.md +++ b/docs/docs/optimizing/evaluation/component_wise_evaluation.md @@ -16,7 +16,7 @@ A useful benchmark for embeddings is the [MTEB Leaderboard](https://huggingface. BEIR is useful for benchmarking if a particular retrieval model generalize well to niche domains in a zero-shot setting. -Since most publically-available embedding and retrieval models are already benchmarked against BEIR (e.g. through the MTEB benchmark), utilizing BEIR is more helpful when you have a unique model that you want to evaluate. +Since most publicly-available embedding and retrieval models are already benchmarked against BEIR (e.g. through the MTEB benchmark), utilizing BEIR is more helpful when you have a unique model that you want to evaluate. 
For instance, after fine-tuning an embedding model on your dataset, it may be helpful to view whether and by how much its performance degrades on a diverse set of domains. This can be an indication of how much data drift may affect your retrieval accuracy, such as if you add documents to your RAG system outside of your fine-tuning training distribution. diff --git a/docs/docs/presentations/materials/2024-06-13-vector-ess-oss-tools.ipynb b/docs/docs/presentations/materials/2024-06-13-vector-ess-oss-tools.ipynb index ae927069edace..977954193cf78 100644 --- a/docs/docs/presentations/materials/2024-06-13-vector-ess-oss-tools.ipynb +++ b/docs/docs/presentations/materials/2024-06-13-vector-ess-oss-tools.ipynb @@ -932,7 +932,7 @@ " SimpleComposableMemory,\n", " ChatMemoryBuffer,\n", ")\n", - "from llama_index.core.agent import FunctionCallingAgentWorker" + "from llama_index.core.agent import FunctionCallingAgent" ] }, { @@ -984,10 +984,12 @@ "metadata": {}, "outputs": [], "source": [ - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", - " [multiply_tool, mystery_tool], llm=openai_llm, verbose=True\n", - ")\n", - "agent = agent_worker.as_agent(memory=composable_memory)" + "agent = FunctionCallingAgent.from_tools(\n", + " [multiply_tool, mystery_tool],\n", + " llm=openai_llm,\n", + " memory=composable_memory,\n", + " verbose=True,\n", + ")" ] }, { @@ -1069,10 +1071,9 @@ "metadata": {}, "outputs": [], "source": [ - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", + "agent_without_memory = FunctionCallingAgent.from_tools(\n", " [multiply_tool, mystery_tool], llm=openai_llm, verbose=True\n", - ")\n", - "agent_without_memory = agent_worker.as_agent()" + ")" ] }, { @@ -1113,9 +1114,7 @@ "outputs": [], "source": [ "llm = OpenAI(model=\"gpt-3.5-turbo-0613\")\n", - "agent_worker = FunctionCallingAgentWorker.from_tools(\n", - " [multiply_tool, mystery_tool], llm=openai_llm, verbose=True\n", - ")\n", + "\n", "composable_memory = SimpleComposableMemory.from_defaults(\n", " primary_memory=ChatMemoryBuffer.from_defaults(),\n", " secondary_memory_sources=[\n", @@ -1125,7 +1124,13 @@ " # later will use original vector_memory again\n", " ],\n", ")\n", - "agent_with_memory = agent_worker.as_agent(memory=composable_memory)" + "\n", + "agent_with_memory = FunctionCallingAgent.from_tools(\n", + " [multiply_tool, mystery_tool],\n", + " llm=openai_llm,\n", + " memory=composable_memory,\n", + " verbose=True,\n", + ")" ] }, { @@ -1418,6 +1423,7 @@ " ToolInteractiveReflectionAgentWorker,\n", ")\n", "from llama_index.agent.openai import OpenAIAgentWorker\n", + "from llama_index.core.agent import FunctionCallingAgentWorker\n", "from llama_index.core.llms import ChatMessage, MessageRole" ] }, diff --git a/docs/docs/production_rag/decouple_chunks.png b/docs/docs/production_rag/decouple_chunks.png deleted file mode 100644 index e4e462099ed3c..0000000000000 Binary files a/docs/docs/production_rag/decouple_chunks.png and /dev/null differ diff --git a/docs/docs/production_rag/doc_agents.png b/docs/docs/production_rag/doc_agents.png deleted file mode 100644 index f6571db808feb..0000000000000 Binary files a/docs/docs/production_rag/doc_agents.png and /dev/null differ diff --git a/docs/docs/production_rag/joint_qa_summary.png b/docs/docs/production_rag/joint_qa_summary.png deleted file mode 100644 index aca66e2a5da37..0000000000000 Binary files a/docs/docs/production_rag/joint_qa_summary.png and /dev/null differ diff --git a/docs/docs/production_rag/structured_retrieval.png 
b/docs/docs/production_rag/structured_retrieval.png deleted file mode 100644 index 8c1912e05c408..0000000000000 Binary files a/docs/docs/production_rag/structured_retrieval.png and /dev/null differ diff --git a/docs/docs/query/disclosure.png b/docs/docs/query/disclosure.png deleted file mode 100644 index f16649106d17f..0000000000000 Binary files a/docs/docs/query/disclosure.png and /dev/null differ diff --git a/docs/docs/query/pipeline_rag_example.png b/docs/docs/query/pipeline_rag_example.png deleted file mode 100644 index 700db714fb73d..0000000000000 Binary files a/docs/docs/query/pipeline_rag_example.png and /dev/null differ diff --git a/docs/docs/query/query_classes.png b/docs/docs/query/query_classes.png deleted file mode 100644 index e2122d42c1c30..0000000000000 Binary files a/docs/docs/query/query_classes.png and /dev/null differ diff --git a/docs/docs/query_transformations/multi_step_diagram.png b/docs/docs/query_transformations/multi_step_diagram.png deleted file mode 100644 index 254740ff72068..0000000000000 Binary files a/docs/docs/query_transformations/multi_step_diagram.png and /dev/null differ diff --git a/docs/docs/query_transformations/single_step_diagram.png b/docs/docs/query_transformations/single_step_diagram.png deleted file mode 100644 index 477daf979d8ff..0000000000000 Binary files a/docs/docs/query_transformations/single_step_diagram.png and /dev/null differ diff --git a/docs/docs/response/response_1.jpeg b/docs/docs/response/response_1.jpeg deleted file mode 100644 index f2014fde7fd8d..0000000000000 Binary files a/docs/docs/response/response_1.jpeg and /dev/null differ diff --git a/docs/docs/storage/storage.png b/docs/docs/storage/storage.png deleted file mode 100644 index 319ba1772305d..0000000000000 Binary files a/docs/docs/storage/storage.png and /dev/null differ diff --git a/docs/docs/structured_output/diagram1.png b/docs/docs/structured_output/diagram1.png deleted file mode 100644 index e9be22efb3991..0000000000000 Binary files a/docs/docs/structured_output/diagram1.png and /dev/null differ diff --git a/docs/docs/structured_output/program2.png b/docs/docs/structured_output/program2.png deleted file mode 100644 index a7f852955d01b..0000000000000 Binary files a/docs/docs/structured_output/program2.png and /dev/null differ diff --git a/docs/docs/understanding/workflows/branches_and_loops.md b/docs/docs/understanding/workflows/branches_and_loops.md index c5fcbbedb2411..5851c5db517cb 100644 --- a/docs/docs/understanding/workflows/branches_and_loops.md +++ b/docs/docs/understanding/workflows/branches_and_loops.md @@ -86,4 +86,6 @@ Our imports are the same as before, but we've created 4 new event types. `start` ![A simple branch](./branching.png) -You can of course combine branches and loops in any order to fulfill the needs of your application. Later in this tutorial you'll learn how to run multiple branches in parallel using `send_event` and synchronize them using `collect_events`. Up next we'll learn about [maintaining state](state.md) with Context. +You can of course combine branches and loops in any order to fulfill the needs of your application. Later in this tutorial you'll learn how to run multiple branches in parallel using `send_event` and synchronize them using `collect_events`. + +Up next we'll learn about [maintaining state](state.md) with Context. 
diff --git a/docs/docs/understanding/workflows/concurrent_execution.md b/docs/docs/understanding/workflows/concurrent_execution.md index a707720770ee6..3596d0969731c 100644 --- a/docs/docs/understanding/workflows/concurrent_execution.md +++ b/docs/docs/understanding/workflows/concurrent_execution.md @@ -10,9 +10,9 @@ In our examples so far, we've only emitted one event from each step. But there a class ParallelFlow(Workflow): @step async def start(self, ctx: Context, ev: StartEvent) -> StepTwoEvent: - self.send_event(StepTwoEvent(query="Query 1")) - self.send_event(StepTwoEvent(query="Query 2")) - self.send_event(StepTwoEvent(query="Query 3")) + ctx.send_event(StepTwoEvent(query="Query 1")) + ctx.send_event(StepTwoEvent(query="Query 2")) + ctx.send_event(StepTwoEvent(query="Query 3")) @step(num_workers=4) async def step_two(self, ctx: Context, ev: StepTwoEvent) -> StopEvent: @@ -32,9 +32,9 @@ If you execute the previous example, you'll note that the workflow stops after w class ConcurrentFlow(Workflow): @step async def start(self, ctx: Context, ev: StartEvent) -> StepTwoEvent: - self.send_event(StepTwoEvent(query="Query 1")) - self.send_event(StepTwoEvent(query="Query 2")) - self.send_event(StepTwoEvent(query="Query 3")) + ctx.send_event(StepTwoEvent(query="Query 1")) + ctx.send_event(StepTwoEvent(query="Query 2")) + ctx.send_event(StepTwoEvent(query="Query 3")) @step(num_workers=4) async def step_two(self, ctx: Context, ev: StepTwoEvent) -> StepThreeEvent: @@ -70,9 +70,9 @@ class ConcurrentFlow(Workflow): async def start( self, ctx: Context, ev: StartEvent ) -> StepAEvent | StepBEvent | StepCEvent: - self.send_event(StepAEvent(query="Query 1")) - self.send_event(StepBEvent(query="Query 2")) - self.send_event(StepCEvent(query="Query 3")) + ctx.send_event(StepAEvent(query="Query 1")) + ctx.send_event(StepBEvent(query="Query 2")) + ctx.send_event(StepCEvent(query="Query 3")) @step async def step_a(self, ctx: Context, ev: StepAEvent) -> StepACompleteEvent: @@ -122,4 +122,4 @@ The visualization of this workflow is quite pleasing: ![A concurrent workflow](./different_events.png) -Now let's look at how [we can extend workflows](extending.md). +Now let's look at how we can extend workflows with [subclassing](subclass.md) and other techniques. diff --git a/docs/docs/understanding/workflows/index.md b/docs/docs/understanding/workflows/index.md index 93793d34bb8c3..6b3f1accd2cab 100644 --- a/docs/docs/understanding/workflows/index.md +++ b/docs/docs/understanding/workflows/index.md @@ -36,4 +36,8 @@ For simple RAG pipelines and linear demos we do not expect you will need Workflo ## Next steps -Let's build [a basic workflow](basic_flow.md). +Let's build [a basic workflow](basic_flow.md). Follow the tutorial sequence step-by-step to learn the core concepts. + +Once you're done, check out our [Workflows component guide](../../module_guides/workflow/index.md) as a reference guide + more practical examples on building RAG/agents. + +If you're done building and want to deploy your workflow to production, check out [our llama_deploy guide](../../module_guides/workflow/deployment.md) ([repo](https://github.com/run-llama/llama_deploy)). diff --git a/docs/docs/understanding/workflows/nested.md b/docs/docs/understanding/workflows/nested.md index 5260a3dfaaad4..c7124a5622f3a 100644 --- a/docs/docs/understanding/workflows/nested.md +++ b/docs/docs/understanding/workflows/nested.md @@ -1,61 +1,94 @@ -# Nested workflows - -Another way to extend workflows is to nest additional workflows. 
-
-Here's our base workflow:
-
-```python
-from llama_index.core.workflow import (
-    StartEvent,
-    StopEvent,
-    Workflow,
-    step,
-    Event,
-    Context,
-)
-from llama_index.utils.workflow import draw_all_possible_flows
-
-
-class Step2Event(Event):
-    query: str
-
-
-class MainWorkflow(Workflow):
-    @step
-    async def start(
-        self, ctx: Context, ev: StartEvent, reflection_workflow: Workflow
-    ) -> Step2Event:
-        print("Need to run reflection")
-        res = await reflection_workflow.run(query=ev.query)
-
-        return Step2Event(query=res)
-
-    @step
-    async def step_two(self, ctx: Context, ev: Step2Event) -> StopEvent:
-        print("Query is ", ev.query)
-        # do something with the query here
-        return StopEvent(result=ev.query)
-```
-
-This workflow by itself will not run; it needs a valid workflow for the reflection step. Let's create one:
-
-```python
-class ReflectionFlow(Workflow):
-    @step
-    async def sub_start(self, ctx: Context, ev: StartEvent) -> StopEvent:
-        print("Doing custom reflection")
-        return StopEvent(result="Improved query")
-```
-
-Now we can run the main workflow by supplying this custom reflection nested flow using the `add_workflows` method, to which we pass an instance of the `ReflectionFlow` class:
-
-```python
-w = MainWorkflow(timeout=10, verbose=False)
-w.add_workflows(reflection_workflow=ReflectionFlow())
-result = await w.run(query="Initial query")
-print(result)
-```
-
-Note that because the nested flow is a totally different workflow rather than a step, `draw_all_possible_flows` will only draw the flow of `MainWorkflow`.
-
-Finally, let's take a look at [observability and debugging](observability.md) in workflows.
+# Nested workflows
+
+Another way to extend workflows is to nest additional workflows. It's possible to create explicit slots in existing flows where you can supply an entire additional workflow. For example, let's say we had a workflow that used an LLM to reflect on the quality of a query. The author might expect that you would want to modify the reflection step, and leave a slot for you to do that.
+
+Here's our base workflow:
+
+```python
+from llama_index.core.workflow import (
+    StartEvent,
+    StopEvent,
+    Workflow,
+    step,
+    Event,
+    Context,
+)
+from llama_index.utils.workflow import draw_all_possible_flows
+
+
+class Step2Event(Event):
+    query: str
+
+
+class MainWorkflow(Workflow):
+    @step
+    async def start(
+        self, ctx: Context, ev: StartEvent, reflection_workflow: Workflow
+    ) -> Step2Event:
+        print("Need to run reflection")
+        res = await reflection_workflow.run(query=ev.query)
+
+        return Step2Event(query=res)
+
+    @step
+    async def step_two(self, ctx: Context, ev: Step2Event) -> StopEvent:
+        print("Query is ", ev.query)
+        # do something with the query here
+        return StopEvent(result=ev.query)
+```
+
+This workflow by itself will not run; it needs a valid workflow for the reflection step.
Let's create one: + +```python +class ReflectionFlow(Workflow): + @step + async def sub_start(self, ctx: Context, ev: StartEvent) -> StopEvent: + print("Doing custom reflection") + return StopEvent(result="Improved query") +``` + +Now we can run the main workflow by supplying this custom reflection nested flow using the `add_workflows` method, to which we pass an instance of the `ReflectionFlow` class: + +```python +w = MainWorkflow(timeout=10, verbose=False) +w.add_workflows(reflection_workflow=ReflectionFlow()) +result = await w.run(query="Initial query") +print(result) +``` + +Note that because the nested flow is a totally different workflow rather than a step, `draw_all_possible_flows` will only draw the flow of `MainWorkflow`. + +## Default workflows + +If you're creating a workflow with multiple slots for nested workflows, you might want to provide default workflows for each slot. You can do this by setting the default value of the slot to an instance of the workflow class. Here's an example. + +First, let's create a default sub-workflow to use: + +```python +class DefaultSubflow(Workflow): + @step() + async def sub_start(self, ctx: Context, ev: StartEvent) -> StopEvent: + print("Doing basic reflection") + return StopEvent(result="Improved query") +``` + +Now we can modify the `MainWorkflow` to include a default sub-workflow: + +```python +class MainWorkflow(Workflow): + @step() + async def start( + self, + ctx: Context, + ev: StartEvent, + reflection_workflow: Workflow = DefaultSubflow(), + ) -> Step2Event: + print("Need to run reflection") + res = await reflection_workflow.run(query=ev.query) + + return Step2Event(query=res) +``` + +Now, if you run the workflow without providing a custom reflection workflow, it will use the default one. This can be very useful for providing a good "out of the box" experience for users who may not want to customize everything. + +Finally, let's take a look at [observability and debugging](observability.md) in workflows. diff --git a/docs/docs/understanding/workflows/observability.md b/docs/docs/understanding/workflows/observability.md index 2932e0013d09c..9d6aa7a2b7b72 100644 --- a/docs/docs/understanding/workflows/observability.md +++ b/docs/docs/understanding/workflows/observability.md @@ -26,9 +26,9 @@ class ConcurrentFlow(Workflow): async def start( self, ctx: Context, ev: StartEvent ) -> StepAEvent | StepBEvent | StepCEvent: - self.send_event(StepAEvent(query="Query 1")) - self.send_event(StepBEvent(query="Query 2")) - self.send_event(StepCEvent(query="Query 3")) + ctx.send_event(StepAEvent(query="Query 1")) + ctx.send_event(StepBEvent(query="Query 2")) + ctx.send_event(StepCEvent(query="Query 3")) @step async def step_a(self, ctx: Context, ev: StepAEvent) -> StepACompleteEvent: @@ -128,6 +128,6 @@ You can also use any of the third-party tools for visualizing and debugging that ![Arize flow](./arize.png) -## That's it! +## One more thing -Congratulations, you've completed the workflows tutorial! +Our last step in this tutorial is an alternative syntax for defining workflows using [unbound functions](unbound_functions.md) instead of classes. 
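To round out the default-slot pattern from nested.md above: with `DefaultSubflow` bound as the slot's default value, `MainWorkflow` runs with or without an explicit override. A short usage sketch (assuming the `MainWorkflow`, `DefaultSubflow`, and `ReflectionFlow` classes defined above):

```python
# With no override, the slot falls back to DefaultSubflow
# (its step prints "Doing basic reflection"):
w = MainWorkflow(timeout=10, verbose=False)
result = await w.run(query="Initial query")

# Supplying ReflectionFlow via add_workflows replaces the default
# (its step prints "Doing custom reflection"):
w = MainWorkflow(timeout=10, verbose=False)
w.add_workflows(reflection_workflow=ReflectionFlow())
result = await w.run(query="Initial query")
```

Both runs return "Improved query" as the final result; only the reflection behavior differs.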
diff --git a/docs/docs/understanding/workflows/state.md b/docs/docs/understanding/workflows/state.md
index 1db3c0db91c16..92b7ce23eb5e2 100644
--- a/docs/docs/understanding/workflows/state.md
+++ b/docs/docs/understanding/workflows/state.md
@@ -33,7 +33,8 @@ class StatefulFlow(Workflow):
     async def start(
         self, ctx: Context, ev: StartEvent
     ) -> SetupEvent | StepTwoEvent:
-        if "some_database" not in ctx.data:
+        db = await ctx.get("some_database", default=None)
+        if db is None:
             print("Need to load data")
             return SetupEvent(query=ev.query)
 
@@ -43,7 +44,7 @@ class StatefulFlow(Workflow):
     @step
     async def setup(self, ctx: Context, ev: SetupEvent) -> StartEvent:
         # load data
-        ctx.data["some_database"] = [1, 2, 3]
+        await ctx.set("some_database", [1, 2, 3])
         return StartEvent(query=ev.query)
 
 ```
@@ -53,9 +54,9 @@ Then in `step_two` we can access data directly from the context without having i
     @step
     async def step_two(self, ctx: Context, ev: StepTwoEvent) -> StopEvent:
         # do something with the data
-        print("Data is ", ctx.data["some_database"])
+        print("Data is ", await ctx.get("some_database"))
 
-        return StopEvent(result=ctx.data["some_database"][1])
+        return StopEvent(result=(await ctx.get("some_database"))[1])
 
 
 w = StatefulFlow(timeout=10, verbose=False)
@@ -63,8 +64,4 @@ result = await w.run(query="Some query")
 print(result)
 ```
 
-## Context persists between runs
-
-Note that the `Context` object persists between runs of the workflow. This means that you can load data into the context in one run and access it in a later run. This can be useful for caching data or for maintaining state between runs.
-
-Next let's look at [concurrent execution](concurrent_execution.md).
+Up next we'll learn how to [stream events](stream.md) from an in-progress workflow.
diff --git a/docs/docs/understanding/workflows/stream.md b/docs/docs/understanding/workflows/stream.md
new file mode 100644
index 0000000000000..d2ab0bc9cee6a
--- /dev/null
+++ b/docs/docs/understanding/workflows/stream.md
@@ -0,0 +1,90 @@
+# Streaming events
+
+Workflows can be complex -- they are designed to handle branching, concurrent logic -- which means they can take time to fully execute. To provide your user with a good experience, you may want to give an indication of progress by streaming events as they occur. Workflows have built-in support for this on the `Context` object.
+
+To get this done, let's bring in all the deps we need:
+
+```python
+from llama_index.core.workflow import (
+    StartEvent,
+    StopEvent,
+    Workflow,
+    step,
+    Event,
+    Context,
+)
+import asyncio
+from llama_index.llms.openai import OpenAI
+from llama_index.utils.workflow import draw_all_possible_flows
+```
+
+Let's set up some events for a simple three-step workflow:
+
+```python
+class FirstEvent(Event):
+    first_output: str
+
+
+class SecondEvent(Event):
+    second_output: str
+    response: str
+```
+
+And define a workflow class that sends events:
+
+```python
+class MyWorkflow(Workflow):
+    @step
+    async def step_one(self, ctx: Context, ev: StartEvent) -> FirstEvent:
+        ctx.write_event_to_stream(Event(msg="Step one is happening"))
+        return FirstEvent(first_output="First step complete.")
+
+    @step
+    async def step_two(self, ctx: Context, ev: FirstEvent) -> SecondEvent:
+        llm = OpenAI(model="gpt-4o-mini")
+        generator = await llm.astream_complete(
+            "Please give me the first 3 paragraphs of Moby Dick, a book in the public domain."
+ ) + async for response in generator: + # Allow the workflow to stream this piece of response + ctx.write_event_to_stream(Event(msg=response.delta)) + return SecondEvent( + second_output="Second step complete, full response attached", + response=str(response), + ) + + @step + async def step_three(self, ctx: Context, ev: SecondEvent) -> StopEvent: + ctx.write_event_to_stream(Event(msg="Step three is happening")) + return StopEvent(result="Workflow complete.") +``` + +!!! tip + `OpenAI()` here assumes you have an `OPENAI_API_KEY` set in your environment. You could also pass one in using the `api_key` parameter. + +In `step_one` and `step_three` we write individual events to the event stream. In `step_two` we use `astream_complete` to produce an iterable generator of the LLM's response, then we produce an event for each chunk of data the LLM sends back to us -- roughly one per word -- before returning the final response to `step_three`. + +To actually get this output, we need to run the workflow asynchronously and listen for the events, like this: + +```python +async def main(): + w = MyWorkflow(timeout=30, verbose=True) + task = asyncio.create_task(w.run(first_input="Start the workflow.")) + + async for ev in w.stream_events(): + print(ev.msg) + + final_result = await task + print("Final result", final_result) + + draw_all_possible_flows(MyWorkflow, filename="streaming_workflow.html") + + +if __name__ == "__main__": + asyncio.run(main()) +``` + +`create_task` runs the workflow in the background, while `stream_events` will provide any event that gets written to the stream. It stops when the stream delivers a `StopEvent`, after which you can get the final result of the workflow as you normally would. + + +Next let's look at [concurrent execution](concurrent_execution.md). diff --git a/docs/docs/understanding/workflows/subclass.md b/docs/docs/understanding/workflows/subclass.md index fc7b4c25d86fe..17b1f75aea167 100644 --- a/docs/docs/understanding/workflows/subclass.md +++ b/docs/docs/understanding/workflows/subclass.md @@ -96,4 +96,4 @@ draw_all_possible_flows(CustomWorkflow, "custom_workflow.html") ![Custom workflow](subclass.png) -Next, let's look at [nested workflows](nested.md). +Next, let's look at another way to extend a workflow: [nested workflows](nested.md). diff --git a/docs/docs/understanding/workflows/unbound_functions.md b/docs/docs/understanding/workflows/unbound_functions.md new file mode 100644 index 0000000000000..9781c40f705cc --- /dev/null +++ b/docs/docs/understanding/workflows/unbound_functions.md @@ -0,0 +1,26 @@ +# Workflows from unbound functions + +Throughout this tutorial we have been showing workflows defined as classes. However, this is not the only way to define a workflow: you can also define the steps in your workflow through independent or "unbound" functions and assign them to a workflow using the `@step()` decorator. Let's see how that works. + +First we create an empty class to hold the steps: + +```python +class TestWorkflow(Workflow): + pass +``` + +Now we can add steps to the workflow by defining functions and decorating them with the `@step()` decorator: + +```python +@step(workflow=TestWorkflow) +def some_step(ev: StartEvent) -> StopEvent: + return StopEvent() +``` + +In this example, we're adding a starting step to the `TestWorkflow` class. The `@step()` decorator takes the `workflow` argument, which is the class to which the step will be added. The function signature is the same as for a regular step, with the exception of the `workflow` argument. 
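+
+For completeness, here is the same idea as a single runnable sketch, with imports included (the `result="done"` payload and the `timeout` value are just illustrative):
+
+```python
+from llama_index.core.workflow import StartEvent, StopEvent, Workflow, step
+
+
+class TestWorkflow(Workflow):
+    pass
+
+
+@step(workflow=TestWorkflow)
+def some_step(ev: StartEvent) -> StopEvent:
+    # An ordinary step body; only the decorator's `workflow`
+    # argument differs from the class-based style.
+    return StopEvent(result="done")
+
+
+w = TestWorkflow(timeout=10, verbose=False)
+result = await w.run()
+print(result)
+```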
+ +You can also add steps this way to any existing workflow class! This can be handy if you just need one extra step in your workflow and don't want to subclass an entire workflow to do it. + +## That's it! + +Congratulations, you've completed the workflows tutorial! diff --git a/docs/docs/vector_stores/faiss_index_0.png b/docs/docs/vector_stores/faiss_index_0.png deleted file mode 100644 index a8aa17bf5c0dd..0000000000000 Binary files a/docs/docs/vector_stores/faiss_index_0.png and /dev/null differ diff --git a/docs/docs/vector_stores/faiss_index_1.png b/docs/docs/vector_stores/faiss_index_1.png deleted file mode 100644 index 285bae5b88cf8..0000000000000 Binary files a/docs/docs/vector_stores/faiss_index_1.png and /dev/null differ diff --git a/docs/docs/vector_stores/pinecone_index_0.png b/docs/docs/vector_stores/pinecone_index_0.png deleted file mode 100644 index f86fd0bbb1ccd..0000000000000 Binary files a/docs/docs/vector_stores/pinecone_index_0.png and /dev/null differ diff --git a/docs/docs/vector_stores/pinecone_reader.png b/docs/docs/vector_stores/pinecone_reader.png deleted file mode 100644 index cedae4292122c..0000000000000 Binary files a/docs/docs/vector_stores/pinecone_reader.png and /dev/null differ diff --git a/docs/docs/vector_stores/qdrant_index_0.png b/docs/docs/vector_stores/qdrant_index_0.png deleted file mode 100644 index 287a125ba0760..0000000000000 Binary files a/docs/docs/vector_stores/qdrant_index_0.png and /dev/null differ diff --git a/docs/docs/vector_stores/qdrant_reader.png b/docs/docs/vector_stores/qdrant_reader.png deleted file mode 100644 index 64b928c9e59c9..0000000000000 Binary files a/docs/docs/vector_stores/qdrant_reader.png and /dev/null differ diff --git a/docs/docs/vector_stores/simple_index_0.png b/docs/docs/vector_stores/simple_index_0.png deleted file mode 100644 index 3318acefcda1a..0000000000000 Binary files a/docs/docs/vector_stores/simple_index_0.png and /dev/null differ diff --git a/docs/docs/vector_stores/weaviate_index_0.png b/docs/docs/vector_stores/weaviate_index_0.png deleted file mode 100644 index 0a03dd1dcbc2e..0000000000000 Binary files a/docs/docs/vector_stores/weaviate_index_0.png and /dev/null differ diff --git a/docs/docs/vector_stores/weaviate_reader_0.png b/docs/docs/vector_stores/weaviate_reader_0.png deleted file mode 100644 index a3129b8db5bb9..0000000000000 Binary files a/docs/docs/vector_stores/weaviate_reader_0.png and /dev/null differ diff --git a/docs/docs/vector_stores/weaviate_reader_1.png b/docs/docs/vector_stores/weaviate_reader_1.png deleted file mode 100644 index 4313310b11450..0000000000000 Binary files a/docs/docs/vector_stores/weaviate_reader_1.png and /dev/null differ diff --git a/docs/merge_llama_deploy_docs.py b/docs/merge_llama_deploy_docs.py new file mode 100644 index 0000000000000..82be5b7914afb --- /dev/null +++ b/docs/merge_llama_deploy_docs.py @@ -0,0 +1,97 @@ +import os +import yaml + +# clone the llama_deploy repo +if not os.path.exists("llama_deploy"): + os.system("git clone https://github.com/run-llama/llama_deploy.git") + + print("Cloned llama_deploy") +else: + os.system("git -C llama_deploy pull") + + print("Updated llama_deploy") + +# copy the llama_deploy/docs/docs/api_reference/llama_deploy to the current docs/api_reference +os.system( + "cp -r llama_deploy/docs/docs/api_reference/llama_deploy ./docs/docs/api_reference/" +) + +# copy the module guides +os.system( + "cp -r llama_deploy/docs/docs/module_guides/workflow/deployment.md ./docs/docs/module_guides/workflow/deployment.md" +) + 
+print("Copied in latest llama-deploy docs") + +# open current mkdocs.yml +with open("./docs/mkdocs.yml") as f: + mkdocs = yaml.safe_load(f) + +# open llama-deploy mkdocs.yml +with open("./llama_deploy/docs/mkdocs.yml") as f: + llama_deploy_mkdocs = yaml.safe_load(f) + +llama_docs_api_reference_idx = 0 +for idx, item in enumerate(llama_deploy_mkdocs["nav"]): + if isinstance(item, dict) and "API Reference" in item: + llama_docs_api_reference_idx = idx + break + +# Add links to llama-deploy api reference to nav +for nav_idx, item in enumerate(mkdocs["nav"]): + if isinstance(item, dict) and "API Reference" in item: + api_reference = item["API Reference"] + for api_ref_idx, api_ref in enumerate(api_reference): + if isinstance(api_ref, dict) and "LLMs" in api_ref: + # Find the Llama Deploy API reference in llama_deploy_mkdocs + break + + api_reference.insert( + api_ref_idx, + { + "Llama Deploy": llama_deploy_mkdocs["nav"][ + llama_docs_api_reference_idx + ]["API Reference"] + }, + ) + break + +print("Merged Llama Deploy API Reference") + +# Add search paths from llama-deploy mkdocs.yml +mkdocs_plugins_idx = 0 +for idx, item in enumerate(mkdocs["plugins"]): + if isinstance(item, dict) and "mkdocstrings" in item: + mkdocs_plugins_idx = idx + break + +mkdocs["plugins"][mkdocs_plugins_idx]["mkdocstrings"]["handlers"]["python"][ + "paths" +].append("../llama_deploy") + +print("Updated search paths") + +# Add the deployment guide to the nav +for value in mkdocs["nav"]: + if isinstance(value, dict) and "Component Guides" in value: + for item in value["Component Guides"]: + if isinstance(item, dict) and "Querying" in item: + for section in item["Querying"]: + if isinstance(section, dict) and "Workflows" in section: + if isinstance(section["Workflows"], str): + section["Workflows"] = [ + section["Workflows"], + "./module_guides/workflow/deployment.md", + ] + else: + section["Workflows"].append( + "./module_guides/workflow/deployment.md" + ) + +print("Updated nav") + +# Save the updated mkdocs.yml +with open("./docs/mkdocs.yml", "w") as f: + yaml.dump(mkdocs, f, sort_keys=False) + +print("Updated mkdocs.yml saved") diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index fd26ea5a52ef0..d3c056a490ea6 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -52,10 +52,12 @@ nav: - A basic workflow: ./understanding/workflows/basic_flow.md - Branches and loops: ./understanding/workflows/branches_and_loops.md - Maintaining state: ./understanding/workflows/state.md + - Streaming events: ./understanding/workflows/stream.md - Concurrent execution: ./understanding/workflows/concurrent_execution.md - Subclassing workflows: ./understanding/workflows/subclass.md - Nested workflows: ./understanding/workflows/nested.md - Observability: ./understanding/workflows/observability.md + - Unbound syntax: ./understanding/workflows/unbound_functions.md - Tracing and Debugging: ./understanding/tracing_and_debugging/tracing_and_debugging.md - Evaluating: - ./understanding/evaluating/evaluating.md @@ -102,6 +104,7 @@ nav: - ./examples/agent/mistral_agent.ipynb - ./examples/agent/multi_document_agents-v1.ipynb - ./examples/agent/multi_document_agents.ipynb + - ./examples/agent/nvidia_agent.ipynb - ./examples/agent/openai_agent.ipynb - ./examples/agent/openai_agent_context_retrieval.ipynb - ./examples/agent/openai_agent_lengthy_tools.ipynb @@ -119,6 +122,7 @@ nav: - ./examples/agent/react_agent_with_query_engine.ipynb - ./examples/agent/return_direct_agent.ipynb - ./examples/agent/structured_planner.ipynb + - 
./examples/agents/nvidia_agent.ipynb - Chat Engines: - ./examples/chat_engine/chat_engine_best.ipynb - ./examples/chat_engine/chat_engine_condense_plus_context.ipynb @@ -141,6 +145,15 @@ nav: - ./examples/cookbooks/llama3_cookbook_ollama_replicate.ipynb - ./examples/cookbooks/mistralai.ipynb - ./examples/cookbooks/mixedbread_reranker.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-2/Components_Of_LlamaIndex.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-3/Evaluating_RAG_Systems.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-4/Ingestion_Pipeline.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-4/Metadata_Extraction.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-5/Observability.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-6/Agents.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-6/Router_And_SubQuestion_QueryEngine.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-7/Multi_Modal_RAG_System.ipynb + - ./examples/cookbooks/oreilly_course_cookbooks/Module-8/Advanced_RAG_with_LlamaParse.ipynb - ./examples/cookbooks/prometheus2_cookbook.ipynb - Customization: - ./examples/customization/llms/AzureOpenAI.ipynb @@ -264,6 +277,7 @@ nav: - ./examples/evaluation/semantic_similarity_eval.ipynb - Finetuning: - ./examples/finetuning/cross_encoder_finetuning/cross_encoder_finetuning.ipynb + - ./examples/finetuning/embeddings/finetune_corpus_embedding.ipynb - ./examples/finetuning/embeddings/finetune_embedding.ipynb - ./examples/finetuning/embeddings/finetune_embedding_adapter.ipynb - ./examples/finetuning/gradient/gradient_fine_tuning.ipynb @@ -334,6 +348,7 @@ nav: - ./examples/llm/nvidia.ipynb - ./examples/llm/nvidia_nim.ipynb - ./examples/llm/nvidia_tensorrt.ipynb + - ./examples/llm/nvidia_text_completion.ipynb - ./examples/llm/nvidia_triton.ipynb - ./examples/llm/oci_genai.ipynb - ./examples/llm/octoai.ipynb @@ -456,6 +471,7 @@ nav: - ./examples/observability/MLflow.ipynb - ./examples/observability/OpenInferenceCallback.ipynb - ./examples/observability/OpenLLMetry.ipynb + - ./examples/observability/OpikCallback.ipynb - ./examples/observability/PromptLayerHandler.ipynb - ./examples/observability/TokenCountingHandler.ipynb - ./examples/observability/UpTrainCallback.ipynb @@ -471,6 +487,7 @@ nav: - ./examples/output_parsing/llm_program.ipynb - ./examples/output_parsing/lmformatenforcer_pydantic_program.ipynb - ./examples/output_parsing/lmformatenforcer_regular_expressions.ipynb + - ./examples/output_parsing/nvidia_output_parsing.ipynb - ./examples/output_parsing/openai_pydantic_program.ipynb - ./examples/output_parsing/openai_sub_question.ipynb - Param Optimizer: @@ -572,6 +589,7 @@ nav: - ./examples/vector_stores/AwadbDemo.ipynb - ./examples/vector_stores/AzureAISearchIndexDemo.ipynb - ./examples/vector_stores/AzureCosmosDBMongoDBvCoreDemo.ipynb + - ./examples/vector_stores/AzureCosmosDBNoSqlDemo.ipynb - ./examples/vector_stores/BagelAutoRetriever.ipynb - ./examples/vector_stores/BagelIndexDemo.ipynb - ./examples/vector_stores/BaiduVectorDBIndexDemo.ipynb @@ -623,6 +641,7 @@ nav: - ./examples/vector_stores/SimpleIndexDemoMMR.ipynb - ./examples/vector_stores/SimpleIndexOnS3.ipynb - ./examples/vector_stores/SupabaseVectorIndexDemo.ipynb + - ./examples/vector_stores/TablestoreDemo.ipynb - ./examples/vector_stores/TairIndexDemo.ipynb - ./examples/vector_stores/TencentVectorDBIndexDemo.ipynb - ./examples/vector_stores/TiDBVector.ipynb @@ -637,6 +656,7 @@ nav: - 
./examples/vector_stores/WeaviateIndexDemo.ipynb - ./examples/vector_stores/WeaviateIndex_auto_retriever.ipynb - ./examples/vector_stores/WeaviateIndex_metadata_filter.ipynb + - ./examples/vector_stores/WordLiftDemo.ipynb - ./examples/vector_stores/ZepIndexDemo.ipynb - ./examples/vector_stores/chroma_auto_retriever.ipynb - ./examples/vector_stores/chroma_metadata_filter.ipynb @@ -651,15 +671,18 @@ nav: - ./examples/vector_stores/qdrant_hybrid.ipynb - Workflow: - ./examples/workflow/JSONalyze_query_engine.ipynb + - ./examples/workflow/advanced_text_to_sql.ipynb - ./examples/workflow/citation_query_engine.ipynb - ./examples/workflow/corrective_rag_pack.ipynb - ./examples/workflow/function_calling_agent.ipynb - ./examples/workflow/long_rag_pack.ipynb + - ./examples/workflow/multi_step_query_engine.ipynb - ./examples/workflow/multi_strategy_workflow.ipynb - ./examples/workflow/parallel_execution.ipynb - ./examples/workflow/rag.ipynb - ./examples/workflow/react_agent.ipynb - ./examples/workflow/reflection.ipynb + - ./examples/workflow/router_query_engine.ipynb - ./examples/workflow/self_discover_workflow.ipynb - ./examples/workflow/sub_question_query_engine.ipynb - ./examples/workflow/workflows_cookbook.ipynb @@ -768,6 +791,7 @@ nav: - ./module_guides/observability/index.md - ./module_guides/observability/instrumentation.md - Settings: ./module_guides/supporting_modules/settings.md + - Workflows: ./module_guides/workflow/index.md - Advanced Topics: - ./optimizing/production_rag.md - ./optimizing/basic_strategies/basic_strategies.md @@ -803,6 +827,7 @@ nav: - ./api_reference/callbacks/literalai.md - ./api_reference/callbacks/llama_debug.md - ./api_reference/callbacks/openinference.md + - ./api_reference/callbacks/opik.md - ./api_reference/callbacks/promptlayer.md - ./api_reference/callbacks/token_counter.md - ./api_reference/callbacks/uptrain.md @@ -816,6 +841,7 @@ nav: - Embeddings: - ./api_reference/embeddings/adapter.md - ./api_reference/embeddings/alephalpha.md + - ./api_reference/embeddings/alibabacloud_aisearch.md - ./api_reference/embeddings/anyscale.md - ./api_reference/embeddings/azure_inference.md - ./api_reference/embeddings/azure_openai.md @@ -865,6 +891,7 @@ nav: - ./api_reference/embeddings/upstage.md - ./api_reference/embeddings/vertex.md - ./api_reference/embeddings/voyageai.md + - ./api_reference/embeddings/xinference.md - ./api_reference/embeddings/yandexgpt.md - Evaluation: - ./api_reference/evaluation/answer_relevancy.md @@ -912,6 +939,7 @@ nav: - ./api_reference/llms/OptimumIntelLLM.md - ./api_reference/llms/ai21.md - ./api_reference/llms/alephalpha.md + - ./api_reference/llms/alibabacloud_aisearch.md - ./api_reference/llms/anthropic.md - ./api_reference/llms/anyscale.md - ./api_reference/llms/azure_inference.md @@ -1085,6 +1113,7 @@ nav: - ./api_reference/multi_modal_llms/openai.md - ./api_reference/multi_modal_llms/replicate.md - Node Parsers & Text Splitters: + - ./api_reference/node_parser/alibabacloud_aisearch.md - ./api_reference/node_parser/dashscope.md - ./api_reference/node_parsers/code.md - ./api_reference/node_parsers/hierarchical.md @@ -1102,6 +1131,7 @@ nav: - Node Postprocessors: - ./api_reference/postprocessor/NER_PII.md - ./api_reference/postprocessor/PII.md + - ./api_reference/postprocessor/alibabacloud_aisearch_rerank.md - ./api_reference/postprocessor/auto_prev_next.md - ./api_reference/postprocessor/cohere_rerank.md - ./api_reference/postprocessor/colbert_rerank.md @@ -1129,6 +1159,7 @@ nav: - ./api_reference/postprocessor/tei_rerank.md - 
./api_reference/postprocessor/time_weighted.md - ./api_reference/postprocessor/voyageai_rerank.md + - ./api_reference/postprocessor/xinference_rerank.md - Object Stores: - ./api_reference/objects/index.md - Output Parsers: @@ -1204,6 +1235,7 @@ nav: - ./api_reference/readers/airbyte_typeform.md - ./api_reference/readers/airbyte_zendesk_support.md - ./api_reference/readers/airtable.md + - ./api_reference/readers/alibabacloud_aisearch.md - ./api_reference/readers/apify.md - ./api_reference/readers/arango_db.md - ./api_reference/readers/arxiv.md @@ -1377,9 +1409,12 @@ nav: - ./api_reference/retrievers/you.md - Schema: - ./api_reference/schema/index.md + - Selectors: + - ./api_reference/selectors/notdiamond.md - Storage: - Chat Store: - ./api_reference/storage/chat_store/azure.md + - ./api_reference/storage/chat_store/dynamodb.md - ./api_reference/storage/chat_store/index.md - ./api_reference/storage/chat_store/redis.md - ./api_reference/storage/chat_store/simple.md @@ -1433,6 +1468,7 @@ nav: - ./api_reference/storage/vector_store/awsdocdb.md - ./api_reference/storage/vector_store/azureaisearch.md - ./api_reference/storage/vector_store/azurecosmosmongo.md + - ./api_reference/storage/vector_store/azurecosmosnosql.md - ./api_reference/storage/vector_store/bagel.md - ./api_reference/storage/vector_store/baiduvectordb.md - ./api_reference/storage/vector_store/cassandra.md @@ -1458,6 +1494,7 @@ nav: - ./api_reference/storage/vector_store/lancedb.md - ./api_reference/storage/vector_store/lantern.md - ./api_reference/storage/vector_store/lindorm.md + - ./api_reference/storage/vector_store/mariadb.md - ./api_reference/storage/vector_store/metal.md - ./api_reference/storage/vector_store/milvus.md - ./api_reference/storage/vector_store/mongodb.md @@ -1475,6 +1512,7 @@ nav: - ./api_reference/storage/vector_store/simple.md - ./api_reference/storage/vector_store/singlestoredb.md - ./api_reference/storage/vector_store/supabase.md + - ./api_reference/storage/vector_store/tablestore.md - ./api_reference/storage/vector_store/tair.md - ./api_reference/storage/vector_store/tencentvectordb.md - ./api_reference/storage/vector_store/tidbvector.md @@ -1544,6 +1582,7 @@ nav: - ./api_reference/workflow/context.md - ./api_reference/workflow/events.md - ./api_reference/workflow/workflow.md + - ./api_reference/workflow/retry_policy.md - Open-Source Community: - Integrations: ./community/integrations.md - Full Stack Projects: ./community/full_stack_projects.md @@ -2136,6 +2175,19 @@ plugins: - ../llama-index-integrations/llms/llama-index-llms-sambanova - ../llama-index-integrations/embeddings/llama-index-embeddings-gigachat - ../llama-index-integrations/llms/llama-index-llms-gigachat + - ../llama-index-integrations/callbacks/llama-index-callbacks-opik + - ../llama-index-integrations/embeddings/llama-index-embeddings-xinference + - ../llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank + - ../llama-index-integrations/selectors/llama-index-selectors-notdiamond + - ../llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb + - ../llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore + - ../llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql + - ../llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch + - ../llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch + - ../llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank + - 
../llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb + - ../llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch + - ../llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch - redirects: redirect_maps: ./api/llama_index.vector_stores.MongoDBAtlasVectorSearch.html: api_reference/storage/vector_store/mongodb.md @@ -2234,6 +2286,7 @@ plugins: ./examples/callbacks/UpTrainCallback.md: https://docs.llamaindex.ai/en/stable/examples/observability/UpTrainCallback/ ./examples/callbacks/WandbCallbackHandler.html: https://docs.llamaindex.ai/en/stable/examples/callbacks/WandbCallbackHandler/ ./examples/callbacks/WandbCallbackHandler.md: https://docs.llamaindex.ai/en/stable/examples/observability/WandbCallbackHandler/ + ./examples/callbacks/OpikCallbackHandler.md: https://docs.llamaindex.ai/en/stable/examples/observability/OpikCallbackHandler/ ./examples/chat_engine/chat_engine_best.html: https://docs.llamaindex.ai/en/stable/examples/chat_engine/chat_engine_best/ ./examples/chat_engine/chat_engine_condense_plus_context.html: https://docs.llamaindex.ai/en/stable/examples/chat_engine/chat_engine_condense_plus_context/ ./examples/chat_engine/chat_engine_condense_question.html: https://docs.llamaindex.ai/en/stable/examples/chat_engine/chat_engine_condense_question/ diff --git a/docs/prepare_for_build.py b/docs/prepare_for_build.py index 40279542bfb57..5007144911ec3 100644 --- a/docs/prepare_for_build.py +++ b/docs/prepare_for_build.py @@ -93,6 +93,7 @@ "response_synthesizers": "Response Synthesizers", "retrievers": "Retrievers", "schema": "Schema", + "selectors": "Selectors", "storage": "Storage", "tools": "Tools", "workflow": "Workflow", diff --git a/llama-index-cli/llama_index/cli/rag/base.py b/llama-index-cli/llama_index/cli/rag/base.py index 5fe5e878893b6..61259c10681d7 100644 --- a/llama-index-cli/llama_index/cli/rag/base.py +++ b/llama-index-cli/llama_index/cli/rag/base.py @@ -90,7 +90,7 @@ class RagCLI(BaseModel): ) chat_engine: Optional[CondenseQuestionChatEngine] = Field( description="Chat engine to use for chatting.", - default_factory=None, + default=None, ) file_extractor: Optional[Dict[str, BaseReader]] = Field( description="File extractor to use for extracting text from files.", diff --git a/llama-index-cli/llama_index/cli/upgrade/mappings.json b/llama-index-cli/llama_index/cli/upgrade/mappings.json index 5713b22e50cac..21d514535ab96 100644 --- a/llama-index-cli/llama_index/cli/upgrade/mappings.json +++ b/llama-index-cli/llama_index/cli/upgrade/mappings.json @@ -438,6 +438,7 @@ "TableParams": "llama_index.vector_stores.baiduvectordb", "TableField": "llama_index.vector_stores.baiduvectordb", "TimescaleVectorStore": "llama_index.vector_stores.timescalevector", + "TablestoreVectorStore": "llama_index.vector_stores.tablestore", "DashVectorStore": "llama_index.vector_stores.dashvector", "JaguarVectorStore": "llama_index.vector_stores.jaguar", "FaissVectorStore": "llama_index.vector_stores.faiss", diff --git a/llama-index-cli/pyproject.toml b/llama-index-cli/pyproject.toml index 4bc41f2ba7257..51d7fdb6eb913 100644 --- a/llama-index-cli/pyproject.toml +++ b/llama-index-cli/pyproject.toml @@ -32,7 +32,7 @@ maintainers = [ name = "llama-index-cli" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.3.0" +version = "0.3.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-core/llama_index/core/__init__.py b/llama-index-core/llama_index/core/__init__.py index 
0da056dcabd41..2e7d32b09670e 100644 --- a/llama-index-core/llama_index/core/__init__.py +++ b/llama-index-core/llama_index/core/__init__.py @@ -1,6 +1,6 @@ """Init file of LlamaIndex.""" -__version__ = "0.11.1" +__version__ = "0.11.9" import logging from logging import NullHandler diff --git a/llama-index-core/llama_index/core/agent/__init__.py b/llama-index-core/llama_index/core/agent/__init__.py index dcee00de937fd..3e8c21a5385a4 100644 --- a/llama-index-core/llama_index/core/agent/__init__.py +++ b/llama-index-core/llama_index/core/agent/__init__.py @@ -12,6 +12,7 @@ from llama_index.core.agent.runner.parallel import ParallelAgentRunner from llama_index.core.agent.types import Task from llama_index.core.chat_engine.types import AgentChatResponse +from llama_index.core.agent.function_calling.base import FunctionCallingAgent from llama_index.core.agent.function_calling.step import FunctionCallingAgentWorker __all__ = [ @@ -26,6 +27,7 @@ "ReActChatFormatter", "FunctionCallingAgentWorker", "FnAgentWorker", + "FunctionCallingAgent", # beta "MultimodalReActAgentWorker", # schema-related diff --git a/llama-index-core/llama_index/core/agent/custom/pipeline_worker.py b/llama-index-core/llama_index/core/agent/custom/pipeline_worker.py index d7feca5d340eb..ad3a7f053a90d 100644 --- a/llama-index-core/llama_index/core/agent/custom/pipeline_worker.py +++ b/llama-index-core/llama_index/core/agent/custom/pipeline_worker.py @@ -5,6 +5,7 @@ Any, List, Optional, + Sequence, cast, ) @@ -25,7 +26,6 @@ ) from llama_index.core.memory.chat_memory_buffer import ChatMemoryBuffer from llama_index.core.query_pipeline.components.agent import ( - AgentFnComponent, AgentInputComponent, BaseAgentComponent, ) @@ -36,7 +36,9 @@ DEFAULT_MODEL_NAME = "gpt-3.5-turbo-0613" -def _get_agent_components(query_component: QueryComponent) -> List[BaseAgentComponent]: +def _get_agent_components( + query_component: QueryComponent, +) -> Sequence[BaseAgentComponent]: """Get agent components.""" agent_components: List[BaseAgentComponent] = [] for c in query_component.sub_query_components: @@ -116,7 +118,7 @@ def agent_input_component(self) -> AgentInputComponent: return cast(AgentInputComponent, self.pipeline.module_dict[root_key]) @property - def agent_components(self) -> List[AgentFnComponent]: + def agent_components(self) -> Sequence[BaseAgentComponent]: """Get agent output component.""" return _get_agent_components(self.pipeline) diff --git a/llama-index-core/llama_index/core/agent/custom/simple_function.py b/llama-index-core/llama_index/core/agent/custom/simple_function.py index 8d704a91554f2..ac9fe4a07c69d 100644 --- a/llama-index-core/llama_index/core/agent/custom/simple_function.py +++ b/llama-index-core/llama_index/core/agent/custom/simple_function.py @@ -5,7 +5,6 @@ Any, Callable, Dict, - List, Optional, Tuple, ) @@ -25,8 +24,6 @@ AGENT_CHAT_RESPONSE_TYPE, AgentChatResponse, ) -from llama_index.core.tools import adapt_to_async_tool -from llama_index.core.tools.types import AsyncBaseTool from llama_index.core.query_pipeline.components.function import get_parameters @@ -88,10 +85,6 @@ def initialize_step(self, task: Task, **kwargs: Any) -> TaskStep: step_state=step_state, ) - def get_tools(self, input: str) -> List[AsyncBaseTool]: - """Get tools.""" - return [adapt_to_async_tool(t) for t in self._get_tools(input)] - def _get_task_step_response( self, agent_response: AGENT_CHAT_RESPONSE_TYPE, step: TaskStep, is_done: bool ) -> TaskStepOutput: diff --git a/llama-index-core/llama_index/core/agent/function_calling/base.py 
b/llama-index-core/llama_index/core/agent/function_calling/base.py index bb7a5f2ba2fe3..ed6667f42cd83 100644 --- a/llama-index-core/llama_index/core/agent/function_calling/base.py +++ b/llama-index-core/llama_index/core/agent/function_calling/base.py @@ -1,23 +1,82 @@ """Function calling agent.""" +from typing import Any, List, Optional -from llama_index.core.agent.runner.base import AgentRunner +from llama_index.core.agent.runner.base import AgentRunner, AgentState +from llama_index.core.agent.function_calling.step import ( + FunctionCallingAgentWorker, + DEFAULT_MAX_FUNCTION_CALLS, +) +from llama_index.core.base.llms.types import ChatMessage +from llama_index.core.callbacks import CallbackManager +from llama_index.core.llms.function_calling import FunctionCallingLLM +from llama_index.core.memory.types import BaseMemory +from llama_index.core.objects.base import ObjectRetriever +from llama_index.core.settings import Settings +from llama_index.core.tools.types import BaseTool class FunctionCallingAgent(AgentRunner): """Function calling agent. - Calls any LLM that supports function calling in a while loop until the task is complete. - + Light wrapper around AgentRunner. """ - # def __init__( - # self, - # tools: List[BaseTool], - # llm: OpenAI, - # memory: BaseMemory, - # prefix_messages: List[ChatMessage], - # verbose: bool = False, - # max_function_calls: int = 5, - # default_tool_choice: str = "auto", - # ) + @classmethod + def from_tools( + cls, + tools: Optional[List[BaseTool]] = None, + tool_retriever: Optional[ObjectRetriever[BaseTool]] = None, + llm: Optional[FunctionCallingLLM] = None, + verbose: bool = False, + max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS, + callback_manager: Optional[CallbackManager] = None, + system_prompt: Optional[str] = None, + prefix_messages: Optional[List[ChatMessage]] = None, + memory: Optional[BaseMemory] = None, + chat_history: Optional[List[ChatMessage]] = None, + state: Optional[AgentState] = None, + allow_parallel_tool_calls: bool = True, + **kwargs: Any, + ) -> "FunctionCallingAgent": + """Create a FunctionCallingAgent from a list of tools.""" + tools = tools or [] + + llm = llm or Settings.llm # type: ignore + assert isinstance( + llm, FunctionCallingLLM + ), "llm must be an instance of FunctionCallingLLM" + + if callback_manager is not None: + llm.callback_manager = callback_manager + + if system_prompt is not None: + if prefix_messages is not None: + raise ValueError( + "Cannot specify both system_prompt and prefix_messages" + ) + prefix_messages = [ChatMessage(content=system_prompt, role="system")] + + prefix_messages = prefix_messages or [] + + agent_worker = FunctionCallingAgentWorker.from_tools( + tools, + tool_retriever=tool_retriever, + llm=llm, + verbose=verbose, + max_function_calls=max_function_calls, + callback_manager=callback_manager, + prefix_messages=prefix_messages, + allow_parallel_tool_calls=allow_parallel_tool_calls, + ) + + return cls( + agent_worker=agent_worker, + memory=memory, + chat_history=chat_history, + state=state, + llm=llm, + callback_manager=callback_manager, + verbose=verbose, + **kwargs, + ) diff --git a/llama-index-core/llama_index/core/agent/function_calling/step.py b/llama-index-core/llama_index/core/agent/function_calling/step.py index 5c2e3692adfd6..690b7e9de7f8b 100644 --- a/llama-index-core/llama_index/core/agent/function_calling/step.py +++ b/llama-index-core/llama_index/core/agent/function_calling/step.py @@ -3,7 +3,7 @@ import json import logging import uuid -from typing import Any, List, 
Optional, cast +from typing import Any, List, Optional, Sequence, cast import asyncio import llama_index.core.instrumentation as instrument from llama_index.core.agent.types import ( @@ -45,7 +45,7 @@ DEFAULT_MAX_FUNCTION_CALLS = 5 -def get_function_by_name(tools: List[BaseTool], name: str) -> Optional[BaseTool]: +def get_function_by_name(tools: Sequence[BaseTool], name: str) -> Optional[BaseTool]: """Get function by name. If the function is not found, None is returned.""" name_to_tool = {tool.metadata.name: tool for tool in tools} return name_to_tool.get(name, None) @@ -100,7 +100,7 @@ def __init__( """Init params.""" if not llm.metadata.is_function_calling_model: raise ValueError( - f"Model name {llm.model} does not support function calling API. " + f"Model name {llm.metadata.model_name} does not support function calling API. " ) self._llm = llm self._verbose = verbose @@ -128,6 +128,7 @@ def from_tools( llm: Optional[FunctionCallingLLM] = None, verbose: bool = False, max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS, + allow_parallel_tool_calls: bool = True, callback_manager: Optional[CallbackManager] = None, system_prompt: Optional[str] = None, prefix_messages: Optional[List[ChatMessage]] = None, @@ -142,7 +143,11 @@ def from_tools( """ tools = tools or [] - llm = llm or Settings.llm + llm = llm or Settings.llm # type: ignore + assert isinstance( + llm, FunctionCallingLLM + ), "llm must be an instance of FunctionCallingLLM" + if callback_manager is not None: llm.callback_manager = callback_manager @@ -163,6 +168,7 @@ def from_tools( verbose=verbose, max_function_calls=max_function_calls, callback_manager=callback_manager, + allow_parallel_tool_calls=allow_parallel_tool_calls, **kwargs, ) @@ -198,7 +204,7 @@ def get_all_messages(self, task: Task) -> List[ChatMessage]: def _call_function( self, - tools: List[BaseTool], + tools: Sequence[BaseTool], tool_call: ToolSelection, memory: BaseMemory, sources: List[ToolOutput], @@ -244,7 +250,7 @@ def _call_function( async def _acall_function( self, - tools: List[BaseTool], + tools: Sequence[BaseTool], tool_call: ToolSelection, memory: BaseMemory, sources: List[ToolOutput], @@ -439,7 +445,7 @@ async def arun_step( # check if any of the tools return directly -- only works if there is one tool call if len(return_directs) == 1 and return_directs[0]: is_done = True - response = tool_outputs[-1].content + response = tool_outputs[-1].content # type: ignore task.extra_state["n_function_calls"] += len(tool_calls) # put tool output in sources and memory diff --git a/llama-index-core/llama_index/core/agent/legacy/react/base.py b/llama-index-core/llama_index/core/agent/legacy/react/base.py index 987ddf9127fe1..9d70172bcd2dd 100644 --- a/llama-index-core/llama_index/core/agent/legacy/react/base.py +++ b/llama-index-core/llama_index/core/agent/legacy/react/base.py @@ -335,7 +335,7 @@ def chat( if chat_history is not None: self._memory.set(chat_history) - self._memory.put(ChatMessage(content=message, role="user")) + self._memory.put(ChatMessage(content=message, role=MessageRole.USER)) current_reasoning: List[BaseReasoningStep] = [] # start loop @@ -374,7 +374,7 @@ async def achat( if chat_history is not None: self._memory.set(chat_history) - self._memory.put(ChatMessage(content=message, role="user")) + self._memory.put(ChatMessage(content=message, role=MessageRole.USER)) current_reasoning: List[BaseReasoningStep] = [] # start loop @@ -412,7 +412,7 @@ def stream_chat( if chat_history is not None: self._memory.set(chat_history) - 
self._memory.put(ChatMessage(content=message, role="user")) + self._memory.put(ChatMessage(content=message, role=MessageRole.USER)) current_reasoning: List[BaseReasoningStep] = [] # start loop @@ -431,7 +431,7 @@ def stream_chat( # iterate over stream, break out if is final answer after the "Answer: " full_response = ChatResponse( - message=ChatMessage(content=None, role="assistant") + message=ChatMessage(content=None, role=MessageRole.ASSISTANT) ) for latest_chunk in chat_stream: full_response = latest_chunk @@ -473,7 +473,7 @@ async def astream_chat( if chat_history is not None: self._memory.set(chat_history) - self._memory.put(ChatMessage(content=message, role="user")) + self._memory.put(ChatMessage(content=message, role=MessageRole.USER)) current_reasoning: List[BaseReasoningStep] = [] # start loop @@ -493,7 +493,7 @@ async def astream_chat( # iterate over stream, break out if is final answer is_done = False full_response = ChatResponse( - message=ChatMessage(content=None, role="assistant") + message=ChatMessage(content=None, role=MessageRole.ASSISTANT) ) async for latest_chunk in chat_stream: full_response = latest_chunk diff --git a/llama-index-core/llama_index/core/agent/react/output_parser.py b/llama-index-core/llama_index/core/agent/react/output_parser.py index d013fb66ccc02..4bdc8eb92bd0e 100644 --- a/llama-index-core/llama_index/core/agent/react/output_parser.py +++ b/llama-index-core/llama_index/core/agent/react/output_parser.py @@ -15,7 +15,7 @@ def extract_tool_use(input_text: str) -> Tuple[str, str, str]: pattern = ( - r"\s*Thought: (.*?)\n+Action: ([a-zA-Z0-9_]+).*?\n+Action Input: .*?(\{.*\})" + r"\s*Thought: (.*?)\n+Action: ([^\n\(\) ]+).*?\n+Action Input: .*?(\{.*\})" ) match = re.search(pattern, input_text, re.DOTALL) diff --git a/llama-index-core/llama_index/core/agent/react/step.py b/llama-index-core/llama_index/core/agent/react/step.py index 9cf1c175a6e58..bf4e6e1f0ff10 100644 --- a/llama-index-core/llama_index/core/agent/react/step.py +++ b/llama-index-core/llama_index/core/agent/react/step.py @@ -401,7 +401,9 @@ async def _aprocess_actions( tool.metadata.return_direct and not tool_output.is_error if tool else False, ) - def _handle_nonexistent_tool_name(self, reasoning_step): + def _handle_nonexistent_tool_name( + self, reasoning_step: ActionReasoningStep + ) -> ToolOutput: # We still emit a `tool_output` object to the task, so that the LLM can know # it has hallucinated in the next reasoning step. 
with self.callback_manager.event( @@ -648,7 +650,7 @@ def _run_step_stream( full_response = ChatResponse( message=ChatMessage(content=None, role="assistant") ) - missed_chunks_storage = [] + missed_chunks_storage: List[ChatResponse] = [] is_done = False for latest_chunk in chat_stream: full_response = latest_chunk @@ -658,6 +660,8 @@ def _run_step_stream( if is_done: break + non_streaming_agent_response = None + agent_response_stream = None if not is_done: # given react prompt outputs, call tools or return response reasoning_steps, is_done = self._process_actions( @@ -665,14 +669,15 @@ def _run_step_stream( ) task.extra_state["current_reasoning"].extend(reasoning_steps) # use _get_response to return intermediate response - agent_response: AGENT_CHAT_RESPONSE_TYPE = self._get_response( + non_streaming_agent_response = self._get_response( task.extra_state["current_reasoning"], task.extra_state["sources"] ) if is_done: - agent_response.is_dummy_stream = True + non_streaming_agent_response.is_dummy_stream = True task.extra_state["new_memory"].put( ChatMessage( - content=agent_response.response, role=MessageRole.ASSISTANT + content=non_streaming_agent_response.response, + role=MessageRole.ASSISTANT, ) ) else: @@ -681,18 +686,21 @@ def _run_step_stream( chunks=[*missed_chunks_storage, latest_chunk], chat_stream=chat_stream ) - agent_response = StreamingAgentChatResponse( + agent_response_stream = StreamingAgentChatResponse( chat_stream=response_stream, sources=task.extra_state["sources"], ) thread = Thread( - target=agent_response.write_response_to_history, + target=agent_response_stream.write_response_to_history, args=(task.extra_state["new_memory"],), kwargs={"on_stream_end_fn": partial(self.finalize_task, task)}, ) thread.start() - return self._get_task_step_response(agent_response, step, is_done) + response = agent_response_stream or non_streaming_agent_response + assert response is not None + + return self._get_task_step_response(response, step, is_done) async def _arun_step_stream( self, @@ -723,7 +731,7 @@ async def _arun_step_stream( full_response = ChatResponse( message=ChatMessage(content=None, role="assistant") ) - missed_chunks_storage = [] + missed_chunks_storage: List[ChatResponse] = [] is_done = False async for latest_chunk in chat_stream: full_response = latest_chunk @@ -733,6 +741,8 @@ async def _arun_step_stream( if is_done: break + non_streaming_agent_response = None + agent_response_stream = None if not is_done: # given react prompt outputs, call tools or return response reasoning_steps, is_done = await self._aprocess_actions( @@ -740,15 +750,16 @@ async def _arun_step_stream( ) task.extra_state["current_reasoning"].extend(reasoning_steps) # use _get_response to return intermediate response - agent_response: AGENT_CHAT_RESPONSE_TYPE = self._get_response( + non_streaming_agent_response = self._get_response( task.extra_state["current_reasoning"], task.extra_state["sources"] ) if is_done: - agent_response.is_dummy_stream = True + non_streaming_agent_response.is_dummy_stream = True task.extra_state["new_memory"].put( ChatMessage( - content=agent_response.response, role=MessageRole.ASSISTANT + content=non_streaming_agent_response.response, + role=MessageRole.ASSISTANT, ) ) else: @@ -757,23 +768,27 @@ async def _arun_step_stream( chunks=[*missed_chunks_storage, latest_chunk], chat_stream=chat_stream ) - agent_response = StreamingAgentChatResponse( + agent_response_stream = StreamingAgentChatResponse( achat_stream=response_stream, sources=task.extra_state["sources"], ) # create task 
to write chat response to history asyncio.create_task( - agent_response.awrite_response_to_history( + agent_response_stream.awrite_response_to_history( task.extra_state["new_memory"], on_stream_end_fn=partial(self.finalize_task, task), ) ) # wait until response writing is done - agent_response._ensure_async_setup() + agent_response_stream._ensure_async_setup() - await agent_response.is_function_false_event.wait() + assert agent_response_stream.is_function_false_event is not None + await agent_response_stream.is_function_false_event.wait() - return self._get_task_step_response(agent_response, step, is_done) + response = agent_response_stream or non_streaming_agent_response + assert response is not None + + return self._get_task_step_response(response, step, is_done) @trace_method("run_step") def run_step(self, step: TaskStep, task: Task, **kwargs: Any) -> TaskStepOutput: diff --git a/llama-index-core/llama_index/core/agent/react_multimodal/step.py b/llama-index-core/llama_index/core/agent/react_multimodal/step.py index 20fc093d6d745..c42104cbfdf44 100644 --- a/llama-index-core/llama_index/core/agent/react_multimodal/step.py +++ b/llama-index-core/llama_index/core/agent/react_multimodal/step.py @@ -134,7 +134,7 @@ def __init__( self._add_user_step_to_reasoning = partial( add_user_step_to_reasoning, - generate_chat_message_fn=generate_openai_multi_modal_chat_message, + generate_chat_message_fn=generate_openai_multi_modal_chat_message, # type: ignore ) except ImportError: raise ImportError( diff --git a/llama-index-core/llama_index/core/agent/runner/base.py b/llama-index-core/llama_index/core/agent/runner/base.py index 57b7e056a4c73..47061a3953b43 100644 --- a/llama-index-core/llama_index/core/agent/runner/base.py +++ b/llama-index-core/llama_index/core/agent/runner/base.py @@ -555,6 +555,7 @@ def finalize_response( step_output.output.sources = self.get_task(task_id).extra_state.get( "sources", [] ) + step_output.output.set_source_nodes() return cast(AGENT_CHAT_RESPONSE_TYPE, step_output.output) @@ -701,7 +702,8 @@ def stream_chat( and chat_response.is_dummy_stream ) e.on_end(payload={EventPayload.RESPONSE: chat_response}) - return chat_response + + return chat_response # type: ignore @dispatcher.span @trace_method("chat") @@ -726,7 +728,8 @@ async def astream_chat( and chat_response.is_dummy_stream ) e.on_end(payload={EventPayload.RESPONSE: chat_response}) - return chat_response + + return chat_response # type: ignore def undo_step(self, task_id: str) -> None: """Undo previous step.""" diff --git a/llama-index-core/llama_index/core/agent/runner/parallel.py b/llama-index-core/llama_index/core/agent/runner/parallel.py index 235d7f665de23..1f8547cfad572 100644 --- a/llama-index-core/llama_index/core/agent/runner/parallel.py +++ b/llama-index-core/llama_index/core/agent/runner/parallel.py @@ -147,7 +147,7 @@ def get_completed_tasks(self, **kwargs: Any) -> List[Task]: and task_state.completed_steps[-1].is_last ] - def get_task_output(self, task_id: str) -> TaskStepOutput: + def get_task_output(self, task_id: str, **kwargs: Any) -> TaskStepOutput: """Get task output.""" task_state = self.state.task_dict[task_id] if len(task_state.completed_steps) == 0: @@ -469,9 +469,9 @@ def stream_chat( chat_response = self._chat( message, chat_history, tool_choice, mode=ChatResponseMode.STREAM ) - assert isinstance(chat_response, StreamingAgentChatResponse) e.on_end(payload={EventPayload.RESPONSE: chat_response}) - return chat_response + + return chat_response # type: ignore @trace_method("chat") async def 
astream_chat( @@ -487,9 +487,9 @@ async def astream_chat( chat_response = await self._achat( message, chat_history, tool_choice, mode=ChatResponseMode.STREAM ) - assert isinstance(chat_response, StreamingAgentChatResponse) + e.on_end(payload={EventPayload.RESPONSE: chat_response}) - return chat_response + return chat_response # type: ignore def undo_step(self, task_id: str) -> None: """Undo previous step.""" diff --git a/llama-index-core/llama_index/core/agent/runner/planner.py b/llama-index-core/llama_index/core/agent/runner/planner.py index c89434d75d7e1..73c007ebda2d2 100644 --- a/llama-index-core/llama_index/core/agent/runner/planner.py +++ b/llama-index-core/llama_index/core/agent/runner/planner.py @@ -144,7 +144,7 @@ class StructuredPlannerAgent(BasePlanningAgentRunner): Args: agent_worker (BaseAgentWorker): step executor chat_history (Optional[List[ChatMessage]], optional): chat history. Defaults to None. - state (Optional[AgentState], optional): agent state. Defaults to None. + state (Optional[PlannerAgentState], optional): agent state. Defaults to None. memory (Optional[BaseMemory], optional): memory. Defaults to None. llm (Optional[LLM], optional): LLM. Defaults to None. callback_manager (Optional[CallbackManager], optional): callback manager. Defaults to None. @@ -171,7 +171,7 @@ def __init__( ) -> None: """Initialize.""" self.agent_worker = agent_worker - self.state = state or PlannerAgentState() + self.state: PlannerAgentState = state or PlannerAgentState() self.memory = memory or ChatMemoryBuffer.from_defaults(chat_history, llm=llm) self.tools = tools self.tool_retriever = tool_retriever @@ -231,7 +231,9 @@ def create_plan(self, input: str, **kwargs: Any) -> str: tools = self.get_tools(input) tools_str = "" for tool in tools: - tools_str += tool.metadata.name + ": " + tool.metadata.description + "\n" + tools_str += ( + (tool.metadata.name or "") + ": " + tool.metadata.description + "\n" + ) try: plan = self.llm.structured_predict( @@ -273,7 +275,9 @@ async def acreate_plan(self, input: str, **kwargs: Any) -> str: tools = self.get_tools(input) tools_str = "" for tool in tools: - tools_str += tool.metadata.name + ": " + tool.metadata.description + "\n" + tools_str += ( + (tool.metadata.name or "") + ": " + tool.metadata.description + "\n" + ) try: plan = await self.llm.astructured_predict( @@ -339,7 +343,9 @@ def get_refine_plan_prompt_kwargs( tools = self.get_tools(remaining_sub_tasks_str) tools_str = "" for tool in tools: - tools_str += tool.metadata.name + ": " + tool.metadata.description + "\n" + tools_str += ( + (tool.metadata.name or "") + ": " + tool.metadata.description + "\n" + ) # return the kwargs return { @@ -425,6 +431,7 @@ def run_task( task_id: str, mode: ChatResponseMode = ChatResponseMode.WAIT, tool_choice: Union[str, dict] = "auto", + **kwargs: Any, ) -> AGENT_CHAT_RESPONSE_TYPE: """Run a task.""" while True: @@ -450,6 +457,7 @@ async def arun_task( task_id: str, mode: ChatResponseMode = ChatResponseMode.WAIT, tool_choice: Union[str, dict] = "auto", + **kwargs: Any, ) -> AGENT_CHAT_RESPONSE_TYPE: """Run a task.""" while True: diff --git a/llama-index-core/llama_index/core/base/agent/types.py b/llama-index-core/llama_index/core/base/agent/types.py index 91973a15ffd04..69ed99b0e505c 100644 --- a/llama-index-core/llama_index/core/base/agent/types.py +++ b/llama-index-core/llama_index/core/base/agent/types.py @@ -192,8 +192,6 @@ class Task(BaseModel): class BaseAgentWorker(PromptMixin, DispatcherSpanMixin): """Base agent worker.""" - model_config = 
ConfigDict(arbitrary_types_allowed=True) - def _get_prompts(self) -> PromptDictType: """Get prompts.""" # TODO: the ReAct agent does not explicitly specify prompts, would need a diff --git a/llama-index-core/llama_index/core/base/base_retriever.py b/llama-index-core/llama_index/core/base/base_retriever.py index ab891a0fd0ab6..2bf86769c5f88 100644 --- a/llama-index-core/llama_index/core/base/base_retriever.py +++ b/llama-index-core/llama_index/core/base/base_retriever.py @@ -214,8 +214,8 @@ async def _ahandle_recursive_retrieval( for n in retrieved_nodes if not ( (n.node.hash, n.node.ref_doc_id) in seen - or seen.add((n.node.hash, n.node.ref_doc_id)) - ) # type: ignore[func-returns-value] + or seen.add((n.node.hash, n.node.ref_doc_id)) # type: ignore[func-returns-value] + ) ] @dispatcher.span diff --git a/llama-index-core/llama_index/core/base/embeddings/base.py b/llama-index-core/llama_index/core/base/embeddings/base.py index 240fc33c81e09..b7a5e029bd819 100644 --- a/llama-index-core/llama_index/core/base/embeddings/base.py +++ b/llama-index-core/llama_index/core/base/embeddings/base.py @@ -3,7 +3,7 @@ import asyncio from abc import abstractmethod from enum import Enum -from typing import Any, Callable, Coroutine, List, Optional, Tuple +from typing import Any, Callable, Coroutine, List, Optional, Sequence, Tuple import numpy as np from llama_index.core.bridge.pydantic import ( @@ -439,7 +439,7 @@ def similarity( """Get embedding similarity.""" return similarity(embedding1=embedding1, embedding2=embedding2, mode=mode) - def __call__(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: + def __call__(self, nodes: Sequence[BaseNode], **kwargs: Any) -> Sequence[BaseNode]: embeddings = self.get_text_embedding_batch( [node.get_content(metadata_mode=MetadataMode.EMBED) for node in nodes], **kwargs, @@ -450,7 +450,9 @@ def __call__(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: return nodes - async def acall(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: + async def acall( + self, nodes: Sequence[BaseNode], **kwargs: Any + ) -> Sequence[BaseNode]: embeddings = await self.aget_text_embedding_batch( [node.get_content(metadata_mode=MetadataMode.EMBED) for node in nodes], **kwargs, diff --git a/llama-index-core/llama_index/core/base/llms/types.py b/llama-index-core/llama_index/core/base/llms/types.py index ca6f58b5439a1..8c3248e3769f3 100644 --- a/llama-index-core/llama_index/core/base/llms/types.py +++ b/llama-index-core/llama_index/core/base/llms/types.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, AsyncGenerator, Generator, Optional, Union, List, Any +from typing import Any, AsyncGenerator, Dict, Generator, Optional, Union, List, Any from llama_index.core.bridge.pydantic import BaseModel, Field, ConfigDict from llama_index.core.constants import DEFAULT_CONTEXT_WINDOW, DEFAULT_NUM_OUTPUTS @@ -10,7 +10,7 @@ except ImportError: from pydantic import BaseModel as V2BaseModel - V1BaseModel = V2BaseModel + V1BaseModel = V2BaseModel # type: ignore class MessageRole(str, Enum): @@ -49,7 +49,7 @@ def from_str( def _recursive_serialization(self, value: Any) -> Any: if isinstance(value, (V1BaseModel, V2BaseModel)): - return value.model_dump() + return value.model_dump() # type: ignore if isinstance(value, dict): return { key: self._recursive_serialization(value) @@ -59,10 +59,10 @@ def _recursive_serialization(self, value: Any) -> Any: return [self._recursive_serialization(item) for item in value] return value - def dict(self, **kwargs: Any) -> dict: 
+ def dict(self, **kwargs: Any) -> Dict[str, Any]: return self.model_dump(**kwargs) - def model_dump(self, **kwargs: Any) -> dict: + def model_dump(self, **kwargs: Any) -> Dict[str, Any]: # ensure all additional_kwargs are serializable msg = super().model_dump(**kwargs) diff --git a/llama-index-core/llama_index/core/base/query_pipeline/query.py b/llama-index-core/llama_index/core/base/query_pipeline/query.py index 19bc95e55f6a5..dec590ee466e6 100644 --- a/llama-index-core/llama_index/core/base/query_pipeline/query.py +++ b/llama-index-core/llama_index/core/base/query_pipeline/query.py @@ -81,7 +81,7 @@ def from_keys( """Create InputKeys from tuple.""" return cls(required_keys=required_keys, optional_keys=optional_keys or set()) - def validate(self, input_keys: Set[str]) -> None: + def validate_keys(self, input_keys: Set[str]) -> None: """Validate input keys.""" # check if required keys are present, and that keys all are in required or optional if not self.required_keys.issubset(input_keys): @@ -115,7 +115,7 @@ def from_keys( """Create OutputKeys from tuple.""" return cls(required_keys=required_keys) - def validate(self, input_keys: Set[str]) -> None: + def validate_keys(self, input_keys: Set[str]) -> None: """Validate input keys.""" # validate that input keys exactly match required keys if input_keys != self.required_keys: @@ -187,13 +187,13 @@ def _validate_component_outputs(self, output: Dict[str, Any]) -> Dict[str, Any]: def validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]: """Validate component inputs.""" # make sure set of input keys == self.input_keys - self.input_keys.validate(set(input.keys())) + self.input_keys.validate_keys(set(input.keys())) return self._validate_component_inputs(input) def validate_component_outputs(self, output: Dict[str, Any]) -> Dict[str, Any]: """Validate component outputs.""" # make sure set of output keys == self.output_keys - self.output_keys.validate(set(output.keys())) + self.output_keys.validate_keys(set(output.keys())) return self._validate_component_outputs(output) def run_component(self, **kwargs: Any) -> Dict[str, Any]: diff --git a/llama-index-core/llama_index/core/callbacks/base.py b/llama-index-core/llama_index/core/callbacks/base.py index 97139ef45dce6..db5849195c064 100644 --- a/llama-index-core/llama_index/core/callbacks/base.py +++ b/llama-index-core/llama_index/core/callbacks/base.py @@ -82,7 +82,7 @@ def __init__(self, handlers: Optional[List[BaseCallbackHandler]] = None): if cb_manager is not None: handlers = cb_manager.handlers - self.handlers = handlers + self.handlers: List[BaseCallbackHandler] = handlers self._trace_map: Dict[str, List[str]] = defaultdict(list) def on_event_start( diff --git a/llama-index-core/llama_index/core/callbacks/global_handlers.py b/llama-index-core/llama_index/core/callbacks/global_handlers.py index bf60bd45f5add..1f59c10f7b505 100644 --- a/llama-index-core/llama_index/core/callbacks/global_handlers.py +++ b/llama-index-core/llama_index/core/callbacks/global_handlers.py @@ -28,7 +28,7 @@ def create_global_handler( "Please install it using `pip install llama-index-callbacks-wandb`" ) - handler: BaseCallbackHandler = WandbCallbackHandler(**eval_params) + handler = WandbCallbackHandler(**eval_params) elif eval_mode == "openinference": try: from llama_index.callbacks.openinference import ( @@ -132,6 +132,17 @@ def create_global_handler( "Please install it using `pip install llama-index-callbacks-literalai`" ) handler = literalai_callback_handler(**eval_params) + elif eval_mode == "opik": 
+ try: + from llama_index.callbacks.opik import ( + opik_callback_handler, + ) # pants: no-infer-dep + except ImportError: + raise ImportError( + "Opik Handler is not installed. " + "Please install it using `pip install llama-index-callbacks-opik`" + ) + handler = opik_callback_handler(**eval_params) else: raise ValueError(f"Eval mode {eval_mode} not supported.") diff --git a/llama-index-core/llama_index/core/callbacks/pythonically_printing_base_handler.py b/llama-index-core/llama_index/core/callbacks/pythonically_printing_base_handler.py index b0ac90f10128f..966763ffcec7e 100644 --- a/llama-index-core/llama_index/core/callbacks/pythonically_printing_base_handler.py +++ b/llama-index-core/llama_index/core/callbacks/pythonically_printing_base_handler.py @@ -26,13 +26,13 @@ def __init__( ) -> None: self.logger: Optional[logging.Logger] = logger super().__init__( - event_starts_to_ignore=event_starts_to_ignore, - event_ends_to_ignore=event_ends_to_ignore, + event_starts_to_ignore=event_starts_to_ignore or [], + event_ends_to_ignore=event_ends_to_ignore or [], ) - def _print(self, str) -> None: + def _print(self, print_str: str) -> None: if self.logger: - self.logger.debug(str) + self.logger.debug(print_str) else: # This branch is to preserve existing behavior. - print(str, flush=True) + print(print_str, flush=True) diff --git a/llama-index-core/llama_index/core/callbacks/token_counting.py b/llama-index-core/llama_index/core/callbacks/token_counting.py index 8f9f1766fa91c..a1b5edc207e16 100644 --- a/llama-index-core/llama_index/core/callbacks/token_counting.py +++ b/llama-index-core/llama_index/core/callbacks/token_counting.py @@ -42,7 +42,7 @@ def get_tokens_from_response( """Get the token counts from a raw response.""" raw_response = response.raw if not isinstance(raw_response, dict): - raw_response = dict(raw_response) + raw_response = dict(raw_response or {}) usage = raw_response.get("usage", {}) if usage is None: diff --git a/llama-index-core/llama_index/core/chat_engine/context.py b/llama-index-core/llama_index/core/chat_engine/context.py index 6197381a0a985..8d1f5b1b5e676 100644 --- a/llama-index-core/llama_index/core/chat_engine/context.py +++ b/llama-index-core/llama_index/core/chat_engine/context.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any, List, Optional, Tuple +from typing import Any, List, Optional, Sequence, Tuple from llama_index.core.base.base_retriever import BaseRetriever from llama_index.core.base.llms.types import ChatMessage, MessageRole @@ -151,7 +151,7 @@ def chat( self, message: str, chat_history: Optional[List[ChatMessage]] = None, - prev_chunks=None, + prev_chunks: Optional[List[NodeWithScore]] = None, ) -> AgentChatResponse: if chat_history is not None: self._memory.set(chat_history) @@ -161,11 +161,10 @@ def chat( # If the fetched context is completely empty if len(nodes) == 0 and prev_chunks is not None: - nodes = [j for i in prev_chunks for j in i] context_str = "\n\n".join( [ n.node.get_content(metadata_mode=MetadataMode.LLM).strip() - for n in nodes + for n in prev_chunks ] ) @@ -175,11 +174,16 @@ def chat( ) prefix_messages = self._get_prefix_messages_with_context(context_str_template) - prefix_messages_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + prefix_messages_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in prefix_messages]) + ) ) - ) + else: + prefix_messages_token_count = 0 + all_messages = 
prefix_messages + self._memory.get( initial_token_count=prefix_messages_token_count ) @@ -205,7 +209,7 @@ def stream_chat( self, message: str, chat_history: Optional[List[ChatMessage]] = None, - prev_chunks=None, + prev_chunks: Optional[List[NodeWithScore]] = None, ) -> StreamingAgentChatResponse: if chat_history is not None: self._memory.set(chat_history) @@ -216,11 +220,10 @@ def stream_chat( # If the fetched context is completely empty if len(nodes) == 0 and prev_chunks is not None: - nodes = [j for i in prev_chunks for j in i] context_str = "\n\n".join( [ n.node.get_content(metadata_mode=MetadataMode.LLM).strip() - for n in nodes + for n in prev_chunks ] ) @@ -230,11 +233,16 @@ def stream_chat( ) prefix_messages = self._get_prefix_messages_with_context(context_str_template) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = prefix_messages + self._memory.get( initial_token_count=initial_token_count ) @@ -263,7 +271,7 @@ async def achat( self, message: str, chat_history: Optional[List[ChatMessage]] = None, - prev_chunks=None, + prev_chunks: Optional[Sequence[NodeWithScore]] = None, ) -> AgentChatResponse: if chat_history is not None: self._memory.set(chat_history) @@ -273,11 +281,10 @@ async def achat( # If the fetched context is completely empty if len(nodes) == 0 and prev_chunks is not None: - nodes = [j for i in prev_chunks for j in i] context_str = "\n\n".join( [ n.node.get_content(metadata_mode=MetadataMode.LLM).strip() - for n in nodes + for n in prev_chunks ] ) @@ -287,11 +294,16 @@ async def achat( ) prefix_messages = self._get_prefix_messages_with_context(context_str_template) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = prefix_messages + self._memory.get( initial_token_count=initial_token_count ) @@ -318,7 +330,7 @@ async def astream_chat( self, message: str, chat_history: Optional[List[ChatMessage]] = None, - prev_chunks=None, + prev_chunks: Optional[Sequence[NodeWithScore]] = None, ) -> StreamingAgentChatResponse: if chat_history is not None: self._memory.set(chat_history) @@ -328,11 +340,10 @@ async def astream_chat( # If the fetched context is completely empty if len(nodes) == 0 and prev_chunks is not None: - nodes = [j for i in prev_chunks for j in i] context_str = "\n\n".join( [ n.node.get_content(metadata_mode=MetadataMode.LLM).strip() - for n in nodes + for n in prev_chunks ] ) @@ -342,11 +353,16 @@ async def astream_chat( ) prefix_messages = self._get_prefix_messages_with_context(context_str_template) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = prefix_messages + self._memory.get( initial_token_count=initial_token_count ) diff --git a/llama-index-core/llama_index/core/chat_engine/simple.py 
b/llama-index-core/llama_index/core/chat_engine/simple.py index 9b717e7ab4cac..bfba349eca891 100644 --- a/llama-index-core/llama_index/core/chat_engine/simple.py +++ b/llama-index-core/llama_index/core/chat_engine/simple.py @@ -76,11 +76,16 @@ def chat( if chat_history is not None: self._memory.set(chat_history) self._memory.put(ChatMessage(content=message, role="user")) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in self._prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in self._prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = self._prefix_messages + self._memory.get( initial_token_count=initial_token_count ) @@ -98,11 +103,16 @@ def stream_chat( if chat_history is not None: self._memory.set(chat_history) self._memory.put(ChatMessage(content=message, role="user")) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in self._prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in self._prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = self._prefix_messages + self._memory.get( initial_token_count=initial_token_count ) @@ -124,11 +134,16 @@ async def achat( if chat_history is not None: self._memory.set(chat_history) self._memory.put(ChatMessage(content=message, role="user")) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in self._prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in self._prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = self._prefix_messages + self._memory.get( initial_token_count=initial_token_count ) @@ -146,11 +161,16 @@ async def astream_chat( if chat_history is not None: self._memory.set(chat_history) self._memory.put(ChatMessage(content=message, role="user")) - initial_token_count = len( - self._memory.tokenizer_fn( - " ".join([(m.content or "") for m in self._prefix_messages]) + + if hasattr(self._memory, "tokenizer_fn"): + initial_token_count = len( + self._memory.tokenizer_fn( + " ".join([(m.content or "") for m in self._prefix_messages]) + ) ) - ) + else: + initial_token_count = 0 + all_messages = self._prefix_messages + self._memory.get( initial_token_count=initial_token_count ) diff --git a/llama-index-core/llama_index/core/chat_engine/types.py b/llama-index-core/llama_index/core/chat_engine/types.py index 2659252d1f09b..a1cf8392ad1a3 100644 --- a/llama-index-core/llama_index/core/chat_engine/types.py +++ b/llama-index-core/llama_index/core/chat_engine/types.py @@ -6,7 +6,7 @@ from enum import Enum from queue import Queue, Empty from threading import Event -from typing import AsyncGenerator, Generator, List, Optional, Union, Dict, Any +from typing import AsyncGenerator, Callable, Generator, List, Optional, Union, Dict, Any from llama_index.core.base.llms.types import ( ChatMessage, @@ -54,12 +54,15 @@ class AgentChatResponse: is_dummy_stream: bool = False metadata: Optional[Dict[str, Any]] = None - def __post_init__(self) -> None: + def set_source_nodes(self) -> None: if self.sources and not self.source_nodes: for tool_output in self.sources: if isinstance(tool_output.raw_output, (Response, StreamingResponse)): 
self.source_nodes.extend(tool_output.raw_output.source_nodes) + def __post_init__(self) -> None: + self.set_source_nodes() + def __str__(self) -> str: return self.response @@ -116,12 +119,15 @@ class StreamingAgentChatResponse: # Track if an exception occurred exception: Optional[Exception] = None - def __post_init__(self) -> None: + def set_source_nodes(self) -> None: if self.sources and not self.source_nodes: for tool_output in self.sources: if isinstance(tool_output.raw_output, (Response, StreamingResponse)): self.source_nodes.extend(tool_output.raw_output.source_nodes) + def __post_init__(self) -> None: + self.set_source_nodes() + def __str__(self) -> str: if self.is_done and not self.queue.empty() and not self.is_function: while self.queue.queue: @@ -143,6 +149,9 @@ def put_in_queue(self, delta: Optional[str]) -> None: self.is_function_not_none_thread_event.set() def aput_in_queue(self, delta: Optional[str]) -> None: + assert self.aqueue is not None + assert self.new_item_event is not None + self.aqueue.put_nowait(delta) self.new_item_event.set() @@ -150,7 +159,7 @@ def aput_in_queue(self, delta: Optional[str]) -> None: def write_response_to_history( self, memory: BaseMemory, - on_stream_end_fn: Optional[callable] = None, + on_stream_end_fn: Optional[Callable] = None, ) -> None: if self.chat_stream is None: raise ValueError( @@ -199,9 +208,12 @@ def write_response_to_history( async def awrite_response_to_history( self, memory: BaseMemory, - on_stream_end_fn: Optional[callable] = None, + on_stream_end_fn: Optional[Callable] = None, ) -> None: self._ensure_async_setup() + assert self.aqueue is not None + assert self.is_function_false_event is not None + assert self.new_item_event is not None if self.achat_stream is None: raise ValueError( @@ -268,6 +280,8 @@ def response_gen(self) -> Generator[str, None, None]: async def async_response_gen(self) -> AsyncGenerator[str, None]: self._ensure_async_setup() + assert self.aqueue is not None + while True: if not self.aqueue.empty() or not self.is_done: if self.exception is not None: diff --git a/llama-index-core/llama_index/core/command_line/mappings.json b/llama-index-core/llama_index/core/command_line/mappings.json index fe732aa3728f1..d023bd2de4435 100644 --- a/llama-index-core/llama_index/core/command_line/mappings.json +++ b/llama-index-core/llama_index/core/command_line/mappings.json @@ -437,6 +437,7 @@ "TableParams": "llama_index.vector_stores.baiduvectordb", "TableField": "llama_index.vector_stores.baiduvectordb", "TimescaleVectorStore": "llama_index.vector_stores.timescalevector", + "TablestoreVectorStore": "llama_index.vector_stores.tablestore", "DashVectorStore": "llama_index.vector_stores.dashvector", "JaguarVectorStore": "llama_index.vector_stores.jaguar", "FaissVectorStore": "llama_index.vector_stores.faiss", diff --git a/llama-index-core/llama_index/core/command_line/upgrade.py b/llama-index-core/llama_index/core/command_line/upgrade.py index 291a53f72d62d..792bf1ec5892a 100644 --- a/llama-index-core/llama_index/core/command_line/upgrade.py +++ b/llama-index-core/llama_index/core/command_line/upgrade.py @@ -2,7 +2,7 @@ import os import re from pathlib import Path -from typing import Dict, List, Tuple +from typing import Any, Dict, List, Tuple mappings_path = os.path.join(os.path.dirname(__file__), "mappings.json") @@ -13,7 +13,7 @@ def _parse_from_imports( line_idx: int, lines: List[str], verbose: bool = False, -): +) -> Tuple[List[str], List[str], List[str], int]: new_lines = [] new_installs = [] imported_modules = [] @@ -25,16 
+25,16 @@ def _parse_from_imports( if "from " in line: imported_modules = [line, line.strip().split(" import ")[-1].strip()] if imported_modules[-1].startswith("("): - imported_modules[-1] = [] + imported_modules[-1] = [] # type: ignore parsing_modules = True else: - imported_modules = [line, imported_modules[-1].split(", ")] + imported_modules = [line, imported_modules[-1].split(", ")] # type: ignore if parsing_modules: if ")" in line: parsing_modules = False elif "(" not in line: - imported_modules[-1].append(line.strip().replace(",", "")) + imported_modules[-1].append(line.strip().replace(",", "")) # type: ignore if not parsing_modules and len(imported_modules) > 0: imported_module_names = [x.strip() for x in imported_modules[-1]] @@ -63,7 +63,7 @@ def _parse_from_imports( else: new_imports[new_import_parent].append(module) - for new_import_parent, new_imports in new_imports.items(): + for new_import_parent, new_imports_list in new_imports.items(): new_install_parent = new_import_parent.replace(".", "-").replace( "_", "-" ) @@ -72,8 +72,8 @@ def _parse_from_imports( if len(overlap) == 0: installed_modules.append(new_install_parent) new_installs.append(f"%pip install {new_install_parent}\n") - new_imports = ", ".join(new_imports) - new_lines.append(f"from {new_import_parent} import {new_imports}\n") + new_imports_str = ", ".join(new_imports_list) + new_lines.append(f"from {new_import_parent} import {new_imports_str}\n") parsing_modules = False new_imports = {} @@ -91,7 +91,7 @@ def _parse_hub_downloads( mappings: Dict[str, str], installed_modules: List[str], line: str, -): +) -> Tuple[List[str], List[str], List[str]]: regex = r"download_loader\([\"']([A-Z,a-z]+)[\"'][\s,a-z,A-Z,_=]*\)|download_tool\([\"']([a-z,A-Z]+)[\"'][A-Z,a-z,\s,_=]*\)" result = re.search(regex, line) new_lines = [] @@ -125,9 +125,9 @@ def parse_lines( skipped_lines = 0 for idx, line in enumerate(lines): - this_new_lines = [] - this_new_installs = [] - this_installed_modules = [] + this_new_lines: List[str] = [] + this_new_installs: List[str] = [] + this_installed_modules: List[str] = [] if skipped_lines != 0: skipped_lines -= 1 @@ -178,7 +178,7 @@ def parse_lines( return new_lines, list(set(new_installs)) -def _cell_installs_llama_hub(cell) -> bool: +def _cell_installs_llama_hub(cell: Dict[str, Any]) -> bool: lines = cell["source"] llama_hub_partial_statements = [ "pip install llama-hub", @@ -195,14 +195,14 @@ def _cell_installs_llama_hub(cell) -> bool: return False -def _format_new_installs(new_installs): +def _format_new_installs(new_installs: List[str]) -> List[str]: if new_installs: new_installs = list(set(new_installs)) return new_installs[:-1] + [new_installs[-1].replace("\n", "")] return new_installs -def upgrade_nb_file(file_path): +def upgrade_nb_file(file_path: str) -> None: print(f"\n=====================\n{file_path}\n", flush=True) with open(file_path) as f: notebook = json.load(f) diff --git a/llama-index-core/llama_index/core/download/integration.py b/llama-index-core/llama_index/core/download/integration.py index df7180bb67894..64288cd54bef0 100644 --- a/llama-index-core/llama_index/core/download/integration.py +++ b/llama-index-core/llama_index/core/download/integration.py @@ -6,7 +6,7 @@ from typing import Any -def pip_install(package: str): +def pip_install(package: str) -> None: subprocess.check_call([sys.executable, "-m", "pip", "install", package]) @@ -19,8 +19,8 @@ def download_integration(module_str: str, module_import_str: str, cls_name: str) try: module_spec = 
importlib.util.find_spec(module_import_str) - module = importlib.util.module_from_spec(module_spec) - module_spec.loader.exec_module(module) + module = importlib.util.module_from_spec(module_spec) # type: ignore + module_spec.loader.exec_module(module) # type: ignore pack_cls = getattr(module, cls_name) except ImportError as e: raise ImportError(f"Unable to import {cls_name}") from e diff --git a/llama-index-core/llama_index/core/download/pack.py b/llama-index-core/llama_index/core/download/pack.py index cdd99de658eb6..4ae99bfc6424e 100644 --- a/llama-index-core/llama_index/core/download/pack.py +++ b/llama-index-core/llama_index/core/download/pack.py @@ -92,7 +92,7 @@ def download_module_and_reqs( # Install dependencies if os.path.exists(pyproject_toml_path): - with ChangeDirectory(local_dir_path): + with ChangeDirectory(str(local_dir_path)): subprocess.check_call([sys.executable, "-m", "pip", "install", "."]) diff --git a/llama-index-core/llama_index/core/download/utils.py b/llama-index-core/llama_index/core/download/utils.py index 1e9af5c076b26..aa16d537525a9 100644 --- a/llama-index-core/llama_index/core/download/utils.py +++ b/llama-index-core/llama_index/core/download/utils.py @@ -98,8 +98,8 @@ def get_source_files_list(source_tree_url: str, path: str) -> List[str]: def recursive_tree_traverse( - tree_urls: List[Tuple[str, str]], acc: List[str], source_tree_url: str -): + tree_urls: List[str], acc: List[str], source_tree_url: str +) -> List[str]: """Recursively traversge Github trees to get all file paths in a folder.""" if not tree_urls: return acc @@ -148,5 +148,5 @@ def __enter__(self) -> None: self.saved_path = os.getcwd() os.chdir(self.new_path) - def __exit__(self, etype, value, traceback) -> None: + def __exit__(self, etype: object, value: object, traceback: object) -> None: os.chdir(self.saved_path) diff --git a/llama-index-core/llama_index/core/embeddings/pooling.py b/llama-index-core/llama_index/core/embeddings/pooling.py index 22a80e4787e1e..f3946b3c68bfb 100644 --- a/llama-index-core/llama_index/core/embeddings/pooling.py +++ b/llama-index-core/llama_index/core/embeddings/pooling.py @@ -15,8 +15,8 @@ class Pooling(str, Enum): def __call__(self, array: np.ndarray) -> np.ndarray: if self == self.CLS: - return self.cls_pooling(array) - return self.mean_pooling(array) + return Pooling.cls_pooling(array) + return Pooling.mean_pooling(array) @classmethod @overload diff --git a/llama-index-core/llama_index/core/embeddings/utils.py b/llama-index-core/llama_index/core/embeddings/utils.py index abc8af4645dbc..0adf8e752196c 100644 --- a/llama-index-core/llama_index/core/embeddings/utils.py +++ b/llama-index-core/llama_index/core/embeddings/utils.py @@ -56,7 +56,7 @@ def resolve_embed_model( ) # pants: no-infer-dep embed_model = OpenAIEmbedding() - validate_openai_api_key(embed_model.api_key) + validate_openai_api_key(embed_model.api_key) # type: ignore except ImportError: raise ImportError( "`llama-index-embeddings-openai` package not found, " @@ -133,6 +133,8 @@ def resolve_embed_model( print("Embeddings have been explicitly disabled. 
Using MockEmbedding.") embed_model = MockEmbedding(embed_dim=1) + assert isinstance(embed_model, BaseEmbedding) + embed_model.callback_manager = callback_manager or Settings.callback_manager return embed_model diff --git a/llama-index-core/llama_index/core/evaluation/base.py b/llama-index-core/llama_index/core/evaluation/base.py index 29905977c50aa..7bc5f016018d0 100644 --- a/llama-index-core/llama_index/core/evaluation/base.py +++ b/llama-index-core/llama_index/core/evaluation/base.py @@ -14,18 +14,20 @@ class EvaluationResult(BaseModel): Output of an BaseEvaluator. """ - query: Optional[str] = Field(None, description="Query string") - contexts: Optional[Sequence[str]] = Field(None, description="Context strings") - response: Optional[str] = Field(None, description="Response string") + query: Optional[str] = Field(default=None, description="Query string") + contexts: Optional[Sequence[str]] = Field( + default=None, description="Context strings" + ) + response: Optional[str] = Field(default=None, description="Response string") passing: Optional[bool] = Field( - None, description="Binary evaluation result (passing or not)" + default=None, description="Binary evaluation result (passing or not)" ) feedback: Optional[str] = Field( - None, description="Feedback or reasoning for the response" + default=None, description="Feedback or reasoning for the response" ) - score: Optional[float] = Field(None, description="Score for the response") + score: Optional[float] = Field(default=None, description="Score for the response") pairwise_source: Optional[str] = Field( - None, + default=None, description=( "Used only for pairwise and specifies whether it is from original order of" " presented answers or flipped order" diff --git a/llama-index-core/llama_index/core/evaluation/batch_runner.py b/llama-index-core/llama_index/core/evaluation/batch_runner.py index b0b45811d9044..d5990c0974b80 100644 --- a/llama-index-core/llama_index/core/evaluation/batch_runner.py +++ b/llama-index-core/llama_index/core/evaluation/batch_runner.py @@ -97,7 +97,7 @@ def __init__( self.asyncio_mod = asyncio_module(show_progress=self.show_progress) def _format_results( - self, results: List[EvaluationResult] + self, results: List[Tuple[str, EvaluationResult]] ) -> Dict[str, List[EvaluationResult]]: """Format results.""" # Format results @@ -352,7 +352,7 @@ def evaluate_response_strs( queries: Optional[List[str]] = None, response_strs: Optional[List[str]] = None, contexts_list: Optional[List[List[str]]] = None, - **eval_kwargs_lists: List, + **eval_kwargs_lists: Dict[str, Any], ) -> Dict[str, List[EvaluationResult]]: """ Evaluate query, response pairs. 
diff --git a/llama-index-core/llama_index/core/evaluation/dataset_generation.py b/llama-index-core/llama_index/core/evaluation/dataset_generation.py index 5925c81e0897b..1404316b8088a 100644 --- a/llama-index-core/llama_index/core/evaluation/dataset_generation.py +++ b/llama-index-core/llama_index/core/evaluation/dataset_generation.py @@ -234,7 +234,7 @@ async def _agenerate_dataset( [ Document( text=node.get_content(metadata_mode=self._metadata_mode), - metadata=node.metadata, + metadata=node.metadata, # type: ignore ) ], callback_manager=self.callback_manager, diff --git a/llama-index-core/llama_index/core/evaluation/eval_utils.py b/llama-index-core/llama_index/core/evaluation/eval_utils.py index e2601efc681ed..65cdae6208102 100644 --- a/llama-index-core/llama_index/core/evaluation/eval_utils.py +++ b/llama-index-core/llama_index/core/evaluation/eval_utils.py @@ -45,12 +45,14 @@ def get_responses( def get_results_df( - eval_results_list: List[EvaluationResult], names: List[str], metric_keys: List[str] + eval_results_list: List[Dict[str, List[EvaluationResult]]], + names: List[str], + metric_keys: List[str], ) -> Any: """Get results df. Args: - eval_results_list (List[EvaluationResult]): + eval_results_list (List[Dict[str, List[EvaluationResult]]]): List of evaluation results. names (List[str]): Names of the evaluation results. @@ -69,7 +71,9 @@ def get_results_df( metric_dict["names"] = names for metric_key in metric_keys: for eval_results in eval_results_list: - mean_score = np.array([r.score for r in eval_results[metric_key]]).mean() + mean_score = np.array( + [r.score or 0.0 for r in eval_results[metric_key]] + ).mean() metric_dict[metric_key].append(mean_score) return pd.DataFrame(metric_dict) @@ -89,7 +93,7 @@ def _download_llama_dataset_from_hub(llama_dataset_id: str) -> "LabelledRagDatas f"{tmp}", ] ) - return LabelledRagDataset.from_json(f"{tmp}/rag_dataset.json") + return LabelledRagDataset.from_json(f"{tmp}/rag_dataset.json") # type: ignore except FileNotFoundError as err: raise ValueError( "No dataset associated with the supplied `llama_dataset_id`" @@ -156,7 +160,7 @@ def upload_eval_dataset( # download `LabelledRagDataset` from llama-hub assert llama_dataset_id is not None rag_dataset = _download_llama_dataset_from_hub(llama_dataset_id) - questions = [example.query for example in rag_dataset[:]] + questions = [example.query for example in rag_dataset[:]] # type: ignore eval_questions = client.evals.create_questions( dataset_id=eval_dataset.id, diff --git a/llama-index-core/llama_index/core/evaluation/faithfulness.py b/llama-index-core/llama_index/core/evaluation/faithfulness.py index 51a732681d690..3cc20b26b521a 100644 --- a/llama-index-core/llama_index/core/evaluation/faithfulness.py +++ b/llama-index-core/llama_index/core/evaluation/faithfulness.py @@ -127,9 +127,11 @@ def __init__( self._eval_template: BasePromptTemplate if isinstance(eval_template, str): self._eval_template = PromptTemplate(eval_template) + if isinstance(eval_template, BasePromptTemplate): + self._eval_template = eval_template else: model_name = self._llm.metadata.model_name - self._eval_template = eval_template or TEMPLATES_CATALOG.get( + self._eval_template = TEMPLATES_CATALOG.get( model_name, DEFAULT_EVAL_TEMPLATE ) diff --git a/llama-index-core/llama_index/core/evaluation/multi_modal/faithfulness.py b/llama-index-core/llama_index/core/evaluation/multi_modal/faithfulness.py index 02d3241df0698..f8cc529d5fe5e 100644 --- a/llama-index-core/llama_index/core/evaluation/multi_modal/faithfulness.py +++ 
b/llama-index-core/llama_index/core/evaluation/multi_modal/faithfulness.py @@ -94,7 +94,7 @@ def __init__( "Please install it using `pip install llama-index-multi-modal-llms-openai`" ) - self._multi_modal_llm = OpenAIMultiModal( + self._multi_modal_llm: MultiModalLLM = OpenAIMultiModal( model="gpt-4-vision-preview", max_new_tokens=1000 ) else: diff --git a/llama-index-core/llama_index/core/evaluation/multi_modal/relevancy.py b/llama-index-core/llama_index/core/evaluation/multi_modal/relevancy.py index 693531a51dffc..8dc23b04c1f35 100644 --- a/llama-index-core/llama_index/core/evaluation/multi_modal/relevancy.py +++ b/llama-index-core/llama_index/core/evaluation/multi_modal/relevancy.py @@ -73,7 +73,7 @@ def __init__( "Please install it using `pip install llama-index-multi-modal-llms-openai`" ) - self._multi_modal_llm = OpenAIMultiModal( + self._multi_modal_llm: MultiModalLLM = OpenAIMultiModal( model="gpt-4-vision-preview", max_new_tokens=1000 ) else: diff --git a/llama-index-core/llama_index/core/evaluation/retrieval/evaluator.py b/llama-index-core/llama_index/core/evaluation/retrieval/evaluator.py index c4f75222feb0e..fd686df1567e6 100644 --- a/llama-index-core/llama_index/core/evaluation/retrieval/evaluator.py +++ b/llama-index-core/llama_index/core/evaluation/retrieval/evaluator.py @@ -1,6 +1,6 @@ """Retrieval evaluators.""" -from typing import Any, List, Optional, Sequence, Tuple +from typing import List, Optional, Tuple from llama_index.core.base.base_retriever import BaseRetriever from llama_index.core.bridge.pydantic import Field, SerializeAsAny @@ -8,9 +8,6 @@ BaseRetrievalEvaluator, RetrievalEvalMode, ) -from llama_index.core.evaluation.retrieval.metrics_base import ( - BaseRetrievalMetric, -) from llama_index.core.indices.base_retriever import BaseRetriever from llama_index.core.postprocessor.types import BaseNodePostprocessor from llama_index.core.schema import ImageNode, TextNode @@ -34,21 +31,6 @@ class RetrieverEvaluator(BaseRetrievalEvaluator): default=None, description="Optional post-processor" ) - def __init__( - self, - metrics: Sequence[BaseRetrievalMetric], - retriever: BaseRetriever, - node_postprocessors: Optional[List[BaseNodePostprocessor]] = None, - **kwargs: Any, - ) -> None: - """Init params.""" - super().__init__( - metrics=metrics, - retriever=retriever, - node_postprocessors=node_postprocessors, - **kwargs, - ) - async def _aget_retrieved_ids_and_texts( self, query: str, mode: RetrievalEvalMode = RetrievalEvalMode.TEXT ) -> Tuple[List[str], List[str]]: @@ -84,21 +66,6 @@ class MultiModalRetrieverEvaluator(BaseRetrievalEvaluator): default=None, description="Optional post-processor" ) - def __init__( - self, - metrics: Sequence[BaseRetrievalMetric], - retriever: BaseRetriever, - node_postprocessors: Optional[List[BaseNodePostprocessor]] = None, - **kwargs: Any, - ) -> None: - """Init params.""" - super().__init__( - metrics=metrics, - retriever=retriever, - node_postprocessors=node_postprocessors, - **kwargs, - ) - async def _aget_retrieved_ids_and_texts( self, query: str, mode: RetrievalEvalMode = RetrievalEvalMode.TEXT ) -> Tuple[List[str], List[str]]: diff --git a/llama-index-core/llama_index/core/evaluation/retrieval/metrics.py b/llama-index-core/llama_index/core/evaluation/retrieval/metrics.py index c03a440cd0684..aaacc027899fe 100644 --- a/llama-index-core/llama_index/core/evaluation/retrieval/metrics.py +++ b/llama-index-core/llama_index/core/evaluation/retrieval/metrics.py @@ -34,6 +34,7 @@ def compute( retrieved_ids: Optional[List[str]] = None, 
expected_texts: Optional[List[str]] = None, retrieved_texts: Optional[List[str]] = None, + **kwargs: Any, ) -> RetrievalMetricResult: """Compute metric based on the provided inputs. @@ -93,6 +94,7 @@ def compute( retrieved_ids: Optional[List[str]] = None, expected_texts: Optional[List[str]] = None, retrieved_texts: Optional[List[str]] = None, + **kwargs: Any, ) -> RetrievalMetricResult: """Compute MRR based on the provided inputs and selected method. @@ -160,6 +162,7 @@ def compute( retrieved_ids: Optional[List[str]] = None, expected_texts: Optional[List[str]] = None, retrieved_texts: Optional[List[str]] = None, + **kwargs: Any, ) -> RetrievalMetricResult: """Compute precision based on the provided inputs and selected method. @@ -208,6 +211,7 @@ def compute( retrieved_ids: Optional[List[str]] = None, expected_texts: Optional[List[str]] = None, retrieved_texts: Optional[List[str]] = None, + **kwargs: Any, ) -> RetrievalMetricResult: """Compute recall based on the provided inputs and selected method. @@ -256,6 +260,7 @@ def compute( retrieved_ids: Optional[List[str]] = None, expected_texts: Optional[List[str]] = None, retrieved_texts: Optional[List[str]] = None, + **kwargs: Any, ) -> RetrievalMetricResult: """Compute average precision based on the provided inputs and selected method. @@ -337,6 +342,7 @@ def compute( retrieved_ids: Optional[List[str]] = None, expected_texts: Optional[List[str]] = None, retrieved_texts: Optional[List[str]] = None, + **kwargs: Any, ) -> RetrievalMetricResult: """Compute NDCG based on the provided inputs and selected method. diff --git a/llama-index-core/llama_index/core/extractors/interface.py b/llama-index-core/llama_index/core/extractors/interface.py index f0ae27617d744..f432fd08a558e 100644 --- a/llama-index-core/llama_index/core/extractors/interface.py +++ b/llama-index-core/llama_index/core/extractors/interface.py @@ -97,7 +97,7 @@ def extract(self, nodes: Sequence[BaseNode]) -> List[Dict]: async def aprocess_nodes( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], excluded_embed_metadata_keys: Optional[List[str]] = None, excluded_llm_metadata_keys: Optional[List[str]] = None, **kwargs: Any, @@ -131,11 +131,11 @@ async def aprocess_nodes( if isinstance(node, TextNode): cast(TextNode, node).text_template = self.node_text_template - return new_nodes + return new_nodes # type: ignore def process_nodes( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], excluded_embed_metadata_keys: Optional[List[str]] = None, excluded_llm_metadata_keys: Optional[List[str]] = None, **kwargs: Any, @@ -149,7 +149,7 @@ def process_nodes( ) ) - def __call__(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: + def __call__(self, nodes: Sequence[BaseNode], **kwargs: Any) -> List[BaseNode]: """Post process nodes parsed from documents. Allows extractors to be chained. @@ -159,7 +159,7 @@ def __call__(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: """ return self.process_nodes(nodes, **kwargs) - async def acall(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: + async def acall(self, nodes: Sequence[BaseNode], **kwargs: Any) -> List[BaseNode]: """Post process nodes parsed from documents. Allows extractors to be chained. 
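The extractor hunks above widen `List[BaseNode]` parameters to `Sequence[BaseNode]`, a change this diff repeats for embeddings and graph stores. The motivation is variance: mypy treats `List` as invariant, so a `List[TextNode]` is rejected where a `List[BaseNode]` is expected, while the read-only `Sequence` is covariant and accepts it. A self-contained sketch with stand-in classes (not llama-index types):

from typing import List, Sequence

class BaseNodeLike: ...
class TextNodeLike(BaseNodeLike): ...

def process_list(nodes: List[BaseNodeLike]) -> None: ...
def process_seq(nodes: Sequence[BaseNodeLike]) -> None: ...

text_nodes: List[TextNodeLike] = [TextNodeLike()]
# process_list(text_nodes)  # mypy error: List is invariant
process_seq(text_nodes)      # OK: Sequence is covariant, read-only

The trade-off is that a `Sequence` parameter cannot be mutated in place, which fits methods that return the processed nodes, as the extractor interface here does.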
diff --git a/llama-index-core/llama_index/core/extractors/metadata_extractors.py b/llama-index-core/llama_index/core/extractors/metadata_extractors.py index 62aa04b799ac9..f5ef8f70ad4a7 100644 --- a/llama-index-core/llama_index/core/extractors/metadata_extractors.py +++ b/llama-index-core/llama_index/core/extractors/metadata_extractors.py @@ -20,7 +20,7 @@ (similar with contrastive learning) """ -from typing import Any, Dict, List, Optional, Sequence, cast +from typing import Any, Callable, Dict, List, Optional, Sequence, cast from llama_index.core.async_utils import DEFAULT_NUM_WORKERS, run_jobs from llama_index.core.bridge.pydantic import ( @@ -45,7 +45,7 @@ what is the comprehensive title for this document? Title: """ -def add_class_name(value: Any, handler, info) -> Dict[str, Any]: +def add_class_name(value: Any, handler: Callable, info: Any) -> Dict[str, Any]: partial_result = handler(value, info) if hasattr(value, "class_name"): partial_result.update({"class_name": value.class_name()}) diff --git a/llama-index-core/llama_index/core/graph_stores/simple_labelled.py b/llama-index-core/llama_index/core/graph_stores/simple_labelled.py index de4bd0f8f2deb..32475b0bebf59 100644 --- a/llama-index-core/llama_index/core/graph_stores/simple_labelled.py +++ b/llama-index-core/llama_index/core/graph_stores/simple_labelled.py @@ -1,7 +1,7 @@ import fsspec import json import os -from typing import Any, List, Dict, Tuple, Optional +from typing import Any, List, Dict, Sequence, Tuple, Optional from llama_index.core.graph_stores.types import ( PropertyGraphStore, @@ -128,7 +128,7 @@ def get_rel_map( return triplets[:limit] - def upsert_nodes(self, nodes: List[LabelledNode]) -> None: + def upsert_nodes(self, nodes: Sequence[LabelledNode]) -> None: """Add nodes.""" for node in nodes: self.graph.add_node(node) @@ -202,7 +202,7 @@ def from_dict( # need to load nodes manually node_dicts = data["nodes"] - kg_nodes = {} + kg_nodes: Dict[str, LabelledNode] = {} for id, node_dict in node_dicts.items(): if "name" in node_dict: kg_nodes[id] = EntityNode.model_validate(node_dict) @@ -292,8 +292,8 @@ def show_jupyter_graph(self) -> None: nodes = [] edges = [] for node in self.graph.nodes.values(): - node = {"id": node.id, "properties": {"label": node.id}} - nodes.append(node) + node_dict = {"id": node.id, "properties": {"label": node.id}} + nodes.append(node_dict) for triplet in self.graph.triplets: edge = { "id": triplet[1], @@ -304,4 +304,4 @@ def show_jupyter_graph(self) -> None: edges.append(edge) w.nodes = nodes w.edges = edges - display(w) + display(w) # type: ignore[name-defined] diff --git a/llama-index-core/llama_index/core/graph_stores/types.py b/llama-index-core/llama_index/core/graph_stores/types.py index dac12ed88a29a..5e4a19fbba1e7 100644 --- a/llama-index-core/llama_index/core/graph_stores/types.py +++ b/llama-index-core/llama_index/core/graph_stores/types.py @@ -1,8 +1,18 @@ import fsspec from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional, Tuple, Set, Protocol, runtime_checkable +from typing import ( + Any, + Dict, + List, + Optional, + Tuple, + Set, + Sequence, + Protocol, + runtime_checkable, +) -from llama_index.core.bridge.pydantic import BaseModel, Field +from llama_index.core.bridge.pydantic import BaseModel, Field, SerializeAsAny from llama_index.core.graph_stores.prompts import DEFAULT_CYPHER_TEMPALTE from llama_index.core.prompts import PromptTemplate from llama_index.core.schema import BaseNode, MetadataMode @@ -109,8 +119,8 @@ def id(self) -> str: class 
LabelledPropertyGraph(BaseModel): """In memory labelled property graph containing entities and relations.""" - nodes: Dict[str, LabelledNode] = Field(default_factory=dict) - relations: Dict[str, Relation] = Field(default_factory=dict) + nodes: SerializeAsAny[Dict[str, LabelledNode]] = Field(default_factory=dict) + relations: SerializeAsAny[Dict[str, Relation]] = Field(default_factory=dict) triplets: Set[Tuple[str, str, str]] = Field( default_factory=set, description="List of triplets (subject, relation, object)." ) @@ -325,14 +335,14 @@ def get_llama_nodes(self, node_ids: List[str]) -> List[BaseNode]: for node in nodes: try: converted_nodes.append(metadata_dict_to_node(node.properties)) - converted_nodes[-1].set_content(node.text) + converted_nodes[-1].set_content(node.text) # type: ignore except Exception: continue return converted_nodes @abstractmethod - def upsert_nodes(self, nodes: List[LabelledNode]) -> None: + def upsert_nodes(self, nodes: Sequence[LabelledNode]) -> None: """Upsert nodes.""" ... @@ -460,7 +470,7 @@ async def aget_llama_nodes(self, node_ids: List[str]) -> List[BaseNode]: for node in nodes: try: converted_nodes.append(metadata_dict_to_node(node.properties)) - converted_nodes[-1].set_content(node.text) + converted_nodes[-1].set_content(node.text) # type: ignore except Exception: continue diff --git a/llama-index-core/llama_index/core/img_utils.py b/llama-index-core/llama_index/core/img_utils.py index 4547ddc7b300c..a7cbae2696edf 100644 --- a/llama-index-core/llama_index/core/img_utils.py +++ b/llama-index-core/llama_index/core/img_utils.py @@ -4,16 +4,17 @@ from typing import cast from PIL import Image +from PIL.ImageFile import ImageFile -def img_2_b64(image: Image, format: str = "JPEG") -> str: +def img_2_b64(image: ImageFile, format: str = "JPEG") -> str: """Convert a PIL.Image to a base64 encoded image str.""" buff = BytesIO() image.save(buff, format=format) return cast(str, base64.b64encode(buff.getvalue())) -def b64_2_img(data: str) -> Image: +def b64_2_img(data: str) -> ImageFile: """Convert base64 encoded image str to a PIL.Image.""" buff = BytesIO(base64.b64decode(data)) - return Image.open(buff) + return cast(ImageFile, Image.open(buff)) diff --git a/llama-index-core/llama_index/core/indices/base.py b/llama-index-core/llama_index/core/indices/base.py index 0a8cfe24ce948..3916e640b478c 100644 --- a/llama-index-core/llama_index/core/indices/base.py +++ b/llama-index-core/llama_index/core/indices/base.py @@ -75,7 +75,7 @@ def __init__( if index_struct is None: nodes = nodes or [] index_struct = self.build_index_from_nodes( - nodes + objects, + nodes + objects, # type: ignore **kwargs, # type: ignore ) self._index_struct = index_struct diff --git a/llama-index-core/llama_index/core/indices/document_summary/base.py b/llama-index-core/llama_index/core/indices/document_summary/base.py index 92389f3ecefdc..d94d40992d302 100644 --- a/llama-index-core/llama_index/core/indices/document_summary/base.py +++ b/llama-index-core/llama_index/core/indices/document_summary/base.py @@ -221,7 +221,9 @@ def _add_nodes_to_index( self._vector_store.add(summary_nodes_with_embedding) def _build_index_from_nodes( - self, nodes: Sequence[BaseNode] + self, + nodes: Sequence[BaseNode], + **build_kwargs: Any, ) -> IndexDocumentSummary: """Build index from nodes.""" # first get doc_id to nodes_dict, generate a summary for each doc_id, diff --git a/llama-index-core/llama_index/core/indices/empty/base.py b/llama-index-core/llama_index/core/indices/empty/base.py index 
32d9d719eb2c9..b0d9d2c556e1f 100644 --- a/llama-index-core/llama_index/core/indices/empty/base.py +++ b/llama-index-core/llama_index/core/indices/empty/base.py @@ -58,7 +58,9 @@ def as_query_engine( return super().as_query_engine(llm=llm, **kwargs) - def _build_index_from_nodes(self, nodes: Sequence[BaseNode]) -> EmptyIndexStruct: + def _build_index_from_nodes( + self, nodes: Sequence[BaseNode], **build_kwargs: Any + ) -> EmptyIndexStruct: """Build the index from documents. Args: diff --git a/llama-index-core/llama_index/core/indices/keyword_table/base.py b/llama-index-core/llama_index/core/indices/keyword_table/base.py index 91bde3abb0010..759785a2ca344 100644 --- a/llama-index-core/llama_index/core/indices/keyword_table/base.py +++ b/llama-index-core/llama_index/core/indices/keyword_table/base.py @@ -165,7 +165,9 @@ async def _async_add_nodes_to_index( ) index_struct.add_node(list(keywords), n) - def _build_index_from_nodes(self, nodes: Sequence[BaseNode]) -> KeywordTable: + def _build_index_from_nodes( + self, nodes: Sequence[BaseNode], **build_kwargs: Any + ) -> KeywordTable: """Build the index from nodes.""" # do simple concatenation index_struct = KeywordTable(table={}) diff --git a/llama-index-core/llama_index/core/indices/knowledge_graph/base.py b/llama-index-core/llama_index/core/indices/knowledge_graph/base.py index a6ad5b9138703..7c2f26c905dac 100644 --- a/llama-index-core/llama_index/core/indices/knowledge_graph/base.py +++ b/llama-index-core/llama_index/core/indices/knowledge_graph/base.py @@ -131,6 +131,12 @@ def as_retriever( if len(self.index_struct.embedding_dict) > 0 and retriever_mode is None: retriever_mode = KGRetrieverMode.HYBRID + elif retriever_mode is None: + retriever_mode = KGRetrieverMode.KEYWORD + elif isinstance(retriever_mode, str): + retriever_mode = KGRetrieverMode(retriever_mode) + else: + retriever_mode = retriever_mode return KGTableRetriever( self, @@ -193,7 +199,9 @@ def _parse_triplet_response( results.append((subj, pred, obj)) return results - def _build_index_from_nodes(self, nodes: Sequence[BaseNode]) -> KG: + def _build_index_from_nodes( + self, nodes: Sequence[BaseNode], **build_kwargs: Any + ) -> KG: """Build the index from nodes.""" # do simple concatenation index_struct = self.index_struct_cls() diff --git a/llama-index-core/llama_index/core/indices/list/base.py b/llama-index-core/llama_index/core/indices/list/base.py index b5c595d060271..72a3144a4db15 100644 --- a/llama-index-core/llama_index/core/indices/list/base.py +++ b/llama-index-core/llama_index/core/indices/list/base.py @@ -92,7 +92,10 @@ def as_retriever( raise ValueError(f"Unknown retriever mode: {retriever_mode}") def _build_index_from_nodes( - self, nodes: Sequence[BaseNode], show_progress: bool = False + self, + nodes: Sequence[BaseNode], + show_progress: bool = False, + **build_kwargs: Any, ) -> IndexList: """Build the index from documents. 
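The repeated `_build_index_from_nodes(..., **build_kwargs: Any)` additions in the surrounding index hunks keep each subclass override signature-compatible with the base index class: mypy enforces that an override accept at least everything the base method accepts. A sketch of the rule with hypothetical classes:

from abc import ABC, abstractmethod
from typing import Any, List, Sequence

class BaseIndexSketch(ABC):
    @abstractmethod
    def _build_index_from_nodes(
        self, nodes: Sequence[str], **build_kwargs: Any
    ) -> List[str]:
        ...

class KeywordIndexSketch(BaseIndexSketch):
    # Dropping **build_kwargs here would make the override narrower
    # than the base signature, which mypy flags as a Liskov violation.
    def _build_index_from_nodes(
        self, nodes: Sequence[str], **build_kwargs: Any
    ) -> List[str]:
        return [n.lower() for n in nodes]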
diff --git a/llama-index-core/llama_index/core/indices/managed/base.py b/llama-index-core/llama_index/core/indices/managed/base.py index 8a7f403f61511..fbb988c70bff1 100644 --- a/llama-index-core/llama_index/core/indices/managed/base.py +++ b/llama-index-core/llama_index/core/indices/managed/base.py @@ -62,7 +62,9 @@ def update_ref_doc(self, document: Document, **update_kwargs: Any) -> None: def as_retriever(self, **kwargs: Any) -> BaseRetriever: """Return a Retriever for this managed index.""" - def _build_index_from_nodes(self, nodes: Sequence[BaseNode]) -> IndexDict: + def _build_index_from_nodes( + self, nodes: Sequence[BaseNode], **build_kwargs: Any + ) -> IndexDict: """Build the index from nodes.""" raise NotImplementedError( "_build_index_from_nodes not implemented for BaseManagedIndex." diff --git a/llama-index-core/llama_index/core/indices/multi_modal/base.py b/llama-index-core/llama_index/core/indices/multi_modal/base.py index 45c291f71f2ca..3b75b6fdbdf5a 100644 --- a/llama-index-core/llama_index/core/indices/multi_modal/base.py +++ b/llama-index-core/llama_index/core/indices/multi_modal/base.py @@ -27,7 +27,7 @@ from llama_index.core.llms.utils import LLMType from llama_index.core.multi_modal_llms import MultiModalLLM from llama_index.core.query_engine.multi_modal import SimpleMultiModalQueryEngine -from llama_index.core.schema import BaseNode, ImageNode +from llama_index.core.schema import BaseNode, ImageNode, TextNode from llama_index.core.settings import Settings from llama_index.core.storage.storage_context import StorageContext from llama_index.core.vector_stores.simple import ( @@ -194,8 +194,9 @@ def _get_node_with_embedding( id_to_text_embed_map = None if is_image: + assert all(isinstance(node, ImageNode) for node in nodes) id_to_embed_map = embed_image_nodes( - nodes, + nodes, # type: ignore embed_model=self._image_embed_model, show_progress=show_progress, ) @@ -208,7 +209,7 @@ def _get_node_with_embedding( show_progress=show_progress, ) # TODO: refactor this change of image embed model to same as text - self._image_embed_model = self._embed_model + self._image_embed_model = self._embed_model # type: ignore else: id_to_embed_map = embed_nodes( @@ -223,6 +224,7 @@ def _get_node_with_embedding( result = node.model_copy() result.embedding = embedding if is_image and id_to_text_embed_map: + assert isinstance(result, ImageNode) text_embedding = id_to_text_embed_map[node.node_id] result.text_embedding = text_embedding result.embedding = ( @@ -246,8 +248,9 @@ async def _aget_node_with_embedding( id_to_text_embed_map = None if is_image: + assert all(isinstance(node, ImageNode) for node in nodes) id_to_embed_map = await async_embed_image_nodes( - nodes, + nodes, # type: ignore embed_model=self._image_embed_model, show_progress=show_progress, ) @@ -259,7 +262,7 @@ async def _aget_node_with_embedding( show_progress=show_progress, ) # TODO: refactor this change of image embed model to same as text - self._image_embed_model = self._embed_model + self._image_embed_model = self._embed_model # type: ignore else: id_to_embed_map = await async_embed_nodes( @@ -274,6 +277,7 @@ async def _aget_node_with_embedding( result = node.model_copy() result.embedding = embedding if is_image and id_to_text_embed_map: + assert isinstance(result, ImageNode) text_embedding = id_to_text_embed_map[node.node_id] result.text_embedding = text_embedding result.embedding = ( @@ -301,7 +305,7 @@ async def _async_add_nodes_to_index( for node in nodes: if isinstance(node, ImageNode): image_nodes.append(node) - if 
node.text: + if isinstance(node, TextNode) and node.text: text_nodes.append(node) if len(text_nodes) > 0: @@ -317,7 +321,7 @@ async def _async_add_nodes_to_index( if len(image_nodes) > 0: # embed image nodes as images directly - image_nodes = await self._aget_node_with_embedding( + image_nodes = await self._aget_node_with_embedding( # type: ignore image_nodes, show_progress, is_image=True, @@ -355,19 +359,19 @@ def _add_nodes_to_index( return image_nodes: List[ImageNode] = [] - text_nodes: List[BaseNode] = [] + text_nodes: List[TextNode] = [] new_text_ids: List[str] = [] new_img_ids: List[str] = [] for node in nodes: if isinstance(node, ImageNode): image_nodes.append(node) - if node.text: + if isinstance(node, TextNode) and node.text: text_nodes.append(node) if len(text_nodes) > 0: # embed all nodes as text - include image nodes that have text attached - text_nodes = self._get_node_with_embedding( + text_nodes = self._get_node_with_embedding( # type: ignore text_nodes, show_progress, is_image=False ) new_text_ids = self.storage_context.vector_stores[DEFAULT_VECTOR_STORE].add( @@ -379,7 +383,7 @@ def _add_nodes_to_index( if len(image_nodes) > 0: # embed image nodes as images directly # check if we should use text embedding for images instead of default - image_nodes = self._get_node_with_embedding( + image_nodes = self._get_node_with_embedding( # type: ignore image_nodes, show_progress, is_image=True, diff --git a/llama-index-core/llama_index/core/indices/property_graph/base.py b/llama-index-core/llama_index/core/indices/property_graph/base.py index 0edb485938c9a..5bb876b04844d 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/base.py +++ b/llama-index-core/llama_index/core/indices/property_graph/base.py @@ -1,9 +1,9 @@ import asyncio -from typing import Any, Dict, List, Optional, Sequence, TYPE_CHECKING +from typing import Any, Dict, List, Optional, Sequence, Type, TYPE_CHECKING from llama_index.core.data_structs import IndexLPG from llama_index.core.base.base_retriever import BaseRetriever -from llama_index.core.base.llms.base import BaseLLM +from llama_index.core.llms import LLM from llama_index.core.embeddings.utils import EmbedType, resolve_embed_model from llama_index.core.callbacks import CallbackManager from llama_index.core.graph_stores.simple_labelled import SimplePropertyGraphStore @@ -46,7 +46,7 @@ class PropertyGraphIndex(BaseIndex[IndexLPG]): Args: nodes (Optional[Sequence[BaseNode]]): A list of nodes to insert into the index. - llm (Optional[BaseLLM]): + llm (Optional[LLM]): The language model to use for extracting triplets. Defaults to `Settings.llm`. kg_extractors (Optional[List[TransformComponent]]): A list of transformations to apply to the nodes to extract triplets. 
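The multi-modal index hunks above guard attribute access with `isinstance` checks (for example `isinstance(node, TextNode) and node.text`) rather than reading `.text` off any `BaseNode`. The check is both a runtime guard and a narrowing hint for mypy. A minimal sketch with stand-in node classes, not the library's own:

from typing import List, Union

class ImageNodeSketch:
    def __init__(self, path: str) -> None:
        self.path = path

class TextNodeSketch:
    def __init__(self, text: str) -> None:
        self.text = text

NodeSketch = Union[ImageNodeSketch, TextNodeSketch]

def text_nodes_only(nodes: List[NodeSketch]) -> List[TextNodeSketch]:
    # isinstance() narrows the union, so mypy accepts `n.text` here and
    # the runtime never reads .text off an image node.
    return [n for n in nodes if isinstance(n, TextNodeSketch) and n.text]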
@@ -78,7 +78,7 @@ class PropertyGraphIndex(BaseIndex[IndexLPG]): def __init__( self, nodes: Optional[Sequence[BaseNode]] = None, - llm: Optional[BaseLLM] = None, + llm: Optional[LLM] = None, kg_extractors: Optional[List[TransformComponent]] = None, property_graph_store: Optional[PropertyGraphStore] = None, # vector related params @@ -117,7 +117,7 @@ def __init__( else Settings.embed_model ) else: - self._embed_model = None + self._embed_model = None # type: ignore self._kg_extractors = kg_extractors or [ SimpleLLMPathExtractor(llm=llm or Settings.llm), @@ -142,11 +142,11 @@ def __init__( @classmethod def from_existing( - cls: "PropertyGraphIndex", + cls: Type["PropertyGraphIndex"], property_graph_store: PropertyGraphStore, vector_store: Optional[BasePydanticVectorStore] = None, # general params - llm: Optional[BaseLLM] = None, + llm: Optional[LLM] = None, kg_extractors: Optional[List[TransformComponent]] = None, # vector related params use_async: bool = True, @@ -179,6 +179,8 @@ def from_existing( @property def property_graph_store(self) -> PropertyGraphStore: """Get the labelled property graph store.""" + assert self.storage_context.property_graph_store is not None + return self.storage_context.property_graph_store @property @@ -305,6 +307,8 @@ def _insert_nodes(self, nodes: Sequence[BaseNode]) -> Sequence[BaseNode]: def _insert_nodes_to_vector_index(self, nodes: List[LabelledNode]) -> None: """Insert vector nodes.""" + assert self.vector_store is not None + llama_nodes: List[TextNode] = [] for node in nodes: if node.embedding is not None: @@ -323,7 +327,9 @@ def _insert_nodes_to_vector_index(self, nodes: List[LabelledNode]) -> None: self.vector_store.add(llama_nodes) - def _build_index_from_nodes(self, nodes: Optional[Sequence[BaseNode]]) -> IndexLPG: + def _build_index_from_nodes( + self, nodes: Optional[Sequence[BaseNode]], **build_kwargs: Any + ) -> IndexLPG: """Build index from nodes.""" nodes = self._insert_nodes(nodes or []) @@ -390,6 +396,7 @@ def _insert(self, nodes: Sequence[BaseNode], **insert_kwargs: Any) -> None: """Index-specific logic for inserting nodes to the index struct.""" self._insert_nodes(nodes) + @property def ref_doc_info(self) -> Dict[str, RefDocInfo]: """Retrieve a dict mapping of ingested documents and their nodes+metadata.""" raise NotImplementedError( diff --git a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/base.py b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/base.py index a9090a64951c6..12272f6cfca0b 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/base.py +++ b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/base.py @@ -75,17 +75,19 @@ def _add_source_text( ) -> List[NodeWithScore]: """Combine retrieved nodes/triplets with their source text, using provided preamble.""" # map of ref doc id to triplets/retrieved labelled nodes - graph_node_map = {} + graph_node_map: Dict[str, List[str]] = {} for node in retrieved_nodes: - if node.node.ref_doc_id not in graph_node_map: - graph_node_map[node.node.ref_doc_id] = [] + ref_doc_id = node.node.ref_doc_id or "" + if ref_doc_id not in graph_node_map: + graph_node_map[ref_doc_id] = [] - graph_node_map[node.node.ref_doc_id].append(node.get_content()) + graph_node_map[ref_doc_id].append(node.node.get_content()) - result_nodes = [] + result_nodes: List[NodeWithScore] = [] for node_with_score in retrieved_nodes: - node = og_node_map.get(node_with_score.node.ref_doc_id, None) - if node: + mapped_node = 
og_node_map.get(node_with_score.node.ref_doc_id or "", None) + + if mapped_node: - graph_content = graph_node_map.get(node.node_id, []) + graph_content = graph_node_map.get(mapped_node.node_id, []) if len(graph_content) > 0: graph_content_str = "\n".join(graph_content) @@ -98,11 +100,11 @@ def _add_source_text( new_content = ( preamble_text + graph_content_str + "\n\n" + cur_content ) - node = TextNode(**node.dict()) - node.text = new_content + mapped_node = TextNode(**mapped_node.dict()) + mapped_node.text = new_content result_nodes.append( NodeWithScore( - node=node, + node=mapped_node, score=node_with_score.score, ) ) @@ -113,7 +115,9 @@ def _add_source_text( def add_source_text(self, nodes: List[NodeWithScore]) -> List[NodeWithScore]: """Combine retrieved nodes/triplets with their source text.""" - og_nodes = self._graph_store.get_llama_nodes([x.node.ref_doc_id for x in nodes]) + og_nodes = self._graph_store.get_llama_nodes( + [x.node.ref_doc_id for x in nodes if x.node.ref_doc_id is not None] + ) node_map = {node.node_id: node for node in og_nodes} return self._add_source_text(nodes, node_map) @@ -123,7 +127,7 @@ async def async_add_source_text( ) -> List[NodeWithScore]: """Combine retrieved nodes/triplets with their source text.""" og_nodes = await self._graph_store.aget_llama_nodes( - [x.node.ref_doc_id for x in nodes] + [x.node.ref_doc_id for x in nodes if x.node.ref_doc_id is not None] ) og_node_map = {node.node_id: node for node in og_nodes} diff --git a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/custom.py b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/custom.py index 8004fb4020db4..74152ffe760a4 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/custom.py +++ b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/custom.py @@ -42,7 +42,7 @@ def graph_store(self) -> PropertyGraphStore: return self._graph_store @abstractmethod - def init(self, **kwargs: Any): + def init(self, **kwargs: Any) -> None: """Initialize the retriever. Has access to all keyword arguments passed to the retriever, as well as: @@ -100,7 +100,7 @@ def _parse_custom_return_type( elif all(isinstance(item, TextNode) for item in result): return [NodeWithScore(node=item, score=1.0) for item in result] elif all(isinstance(item, NodeWithScore) for item in result): - return result + return result # type: ignore else: raise ValueError( "Invalid return type. All items in the list must be of the same type." 
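A pattern that recurs in the retriever hunks above: `ref_doc_id` is `Optional[str]`, so it is coalesced to `""` before being used as a dict key (or filtered out before a store lookup), which keeps the mapping typed `Dict[str, List[str]]`. A self-contained sketch of that idiom, with hypothetical names:

from typing import Dict, List, Optional, Tuple


def group_by_ref_doc(items: List[Tuple[Optional[str], str]]) -> Dict[str, List[str]]:
    """Hypothetical helper mirroring the graph_node_map construction above."""
    grouped: Dict[str, List[str]] = {}
    for ref_doc_id, content in items:
        # Coalesce the Optional key so the dict stays Dict[str, ...] for mypy.
        key = ref_doc_id or ""
        grouped.setdefault(key, []).append(content)
    return grouped


print(group_by_ref_doc([("doc-1", "a"), (None, "b"), ("doc-1", "c")]))
# {'doc-1': ['a', 'c'], '': ['b']}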
diff --git a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/llm_synonym.py b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/llm_synonym.py index 8c71f7df2374d..a464663d74f22 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/llm_synonym.py +++ b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/llm_synonym.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Union +from typing import Any, Callable, List, Optional, Union from llama_index.core.llms.llm import LLM from llama_index.core.indices.property_graph.sub_retrievers.base import ( @@ -6,7 +6,6 @@ ) from llama_index.core.graph_stores.types import ( PropertyGraphStore, - Triplet, KG_SOURCE_REL, ) from llama_index.core.prompts import BasePromptTemplate, PromptTemplate @@ -59,9 +58,9 @@ def __init__( ] = DEFAULT_SYNONYM_EXPAND_TEMPLATE, max_keywords: int = 10, path_depth: int = 1, - output_parsing_fn: Optional[callable] = None, + output_parsing_fn: Optional[Callable] = None, llm: Optional[LLM] = None, - **kwargs, + **kwargs: Any, ) -> None: self._llm = llm or Settings.llm if isinstance(synonym_prompt, str): @@ -81,7 +80,7 @@ def _parse_llm_output(self, output: str) -> List[str]: # capitalize to normalize with ingestion return [x.strip().capitalize() for x in matches if x.strip()] - def _prepare_matches(self, matches: List[Triplet]) -> List[NodeWithScore]: + def _prepare_matches(self, matches: List[str]) -> List[NodeWithScore]: kg_nodes = self._graph_store.get(ids=matches) triplets = self._graph_store.get_rel_map( kg_nodes, diff --git a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/text_to_cypher.py b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/text_to_cypher.py index 894a67bd1991a..afdd02bda55c9 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/text_to_cypher.py +++ b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/text_to_cypher.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional, Union +from typing import Any, Callable, List, Optional, Union from llama_index.core.graph_stores.types import PropertyGraphStore from llama_index.core.indices.property_graph.sub_retrievers.base import BasePGRetriever @@ -39,7 +39,7 @@ def __init__( llm: Optional[LLM] = None, text_to_cypher_template: Optional[Union[PromptTemplate, str]] = None, response_template: Optional[str] = None, - cypher_validator: Optional[callable] = None, + cypher_validator: Optional[Callable] = None, allowed_output_fields: Optional[List[str]] = None, **kwargs: Any, ) -> None: @@ -69,25 +69,25 @@ def _parse_generated_cypher(self, cypher_query: str) -> str: def _clean_query_output(self, query_output: Any) -> Any: """Iterate the cypher response, looking for the allowed fields.""" if isinstance(query_output, dict): - filtered = {} + filtered_dict = {} for key, value in query_output.items(): if ( self.allowed_output_fields is None or key in self.allowed_output_fields ): - filtered[key] = value + filtered_dict[key] = value elif isinstance(value, (dict, list)): filtered_value = self._clean_query_output(value) if filtered_value: - filtered[key] = filtered_value - return filtered + filtered_dict[key] = filtered_value + return filtered_dict elif isinstance(query_output, list): - filtered = [] + filtered_list = [] for item in query_output: filtered_item = self._clean_query_output(item) if filtered_item: - filtered.append(filtered_item) - return filtered + 
filtered_list.append(filtered_item) + return filtered_list return None diff --git a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/vector.py b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/vector.py index 2ed71d8230b37..5ba6d98831aec 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/vector.py +++ b/llama-index-core/llama_index/core/indices/property_graph/sub_retrievers/vector.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional +from typing import Any, List, Sequence, Optional from llama_index.core.base.embeddings.base import BaseEmbedding from llama_index.core.indices.property_graph.sub_retrievers.base import ( @@ -12,8 +12,8 @@ from llama_index.core.settings import Settings from llama_index.core.schema import BaseNode, NodeWithScore, QueryBundle from llama_index.core.vector_stores.types import ( + BasePydanticVectorStore, VectorStoreQuery, - VectorStore, MetadataFilters, ) @@ -28,7 +28,7 @@ class VectorContextRetriever(BasePGRetriever): Whether to include source text in the retrieved nodes. Defaults to True. embed_model (Optional[BaseEmbedding], optional): The embedding model to use. Defaults to Settings.embed_model. - vector_store (Optional[VectorStore], optional): + vector_store (Optional[BasePydanticVectorStore], optional): The vector store to use. Defaults to None. Should be supplied if the graph store does not support vector queries. similarity_top_k (int, optional): @@ -44,7 +44,7 @@ def __init__( graph_store: PropertyGraphStore, include_text: bool = True, embed_model: Optional[BaseEmbedding] = None, - vector_store: Optional[VectorStore] = None, + vector_store: Optional[BasePydanticVectorStore] = None, similarity_top_k: int = 4, path_depth: int = 1, similarity_score: Optional[float] = None, @@ -74,7 +74,7 @@ def _get_vector_store_query(self, query_bundle: QueryBundle) -> VectorStoreQuery **self._retriever_kwargs, ) - def _get_kg_ids(self, kg_nodes: List[BaseNode]) -> List[str]: + def _get_kg_ids(self, kg_nodes: Sequence[BaseNode]) -> List[str]: """Backward compatibility method to get kg_ids from kg_nodes.""" return [node.metadata.get(VECTOR_SOURCE_KEY, node.id_) for node in kg_nodes] diff --git a/llama-index-core/llama_index/core/indices/property_graph/transformations/dynamic_llm.py b/llama-index-core/llama_index/core/indices/property_graph/transformations/dynamic_llm.py index ae31419a60fe9..15d3c5a750dc2 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/transformations/dynamic_llm.py +++ b/llama-index-core/llama_index/core/indices/property_graph/transformations/dynamic_llm.py @@ -1,9 +1,8 @@ import asyncio -from typing import Any, Callable, List, Optional, Union, Tuple +from typing import Any, Callable, Dict, List, Optional, Sequence, Union, Tuple import re import json from llama_index.core.async_utils import run_jobs -from llama_index.core.schema import TransformComponent, BaseNode from llama_index.core.prompts import PromptTemplate from llama_index.core.llms.llm import LLM from llama_index.core.graph_stores.types import ( @@ -12,11 +11,11 @@ KG_NODES_KEY, KG_RELATIONS_KEY, ) - from llama_index.core.prompts.default_prompts import ( DEFAULT_DYNAMIC_EXTRACT_PROMPT, DEFAULT_DYNAMIC_EXTRACT_PROPS_PROMPT, ) +from llama_index.core.schema import TransformComponent, BaseNode, MetadataMode def default_parse_dynamic_triplets( @@ -132,7 +131,7 @@ def default_parse_dynamic_triplets_with_props( ) = match # Use more robust parsing for properties - def parse_props(props_str): + def 
parse_props(props_str: str) -> Dict[str, Any]: try: # Handle mixed quotes and convert to a proper dictionary props_str = props_str.replace("'", '"') @@ -247,15 +246,15 @@ def __init__( # convert props to name -> description format if needed if allowed_entity_props and isinstance(allowed_entity_props[0], tuple): - allowed_entity_props = [ + allowed_entity_props = [ # type: ignore f"Property `{k}` with description ({v})" - for k, v in allowed_entity_props + for k, v in allowed_entity_props # type: ignore ] if allowed_relation_props and isinstance(allowed_relation_props[0], tuple): - allowed_relation_props = [ + allowed_relation_props = [ # type: ignore f"Property `{k}` with description ({v})" - for k, v in allowed_relation_props + for k, v in allowed_relation_props # type: ignore ] super().__init__( @@ -276,7 +275,7 @@ def class_name(cls) -> str: return "DynamicLLMPathExtractor" def __call__( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any ) -> List[BaseNode]: """ Extract triples from nodes. @@ -306,10 +305,10 @@ async def _apredict_without_props(self, text: str) -> str: text=text, max_knowledge_triplets=self.max_triplets_per_chunk, allowed_entity_types=", ".join(self.allowed_entity_types) - if len(self.allowed_entity_types) > 0 + if len(self.allowed_entity_types or []) > 0 else "No entity types provided, You are free to define them.", - allowed_relation_types=", ".join(self.allowed_relation_types) - if len(self.allowed_relation_types) > 0 + allowed_relation_types=", ".join(self.allowed_relation_types or []) + if len(self.allowed_relation_types or []) > 0 else "No relation types provided, You are free to define them.", ) @@ -328,10 +327,10 @@ async def _apredict_with_props(self, text: str) -> str: text=text, max_knowledge_triplets=self.max_triplets_per_chunk, allowed_entity_types=", ".join(self.allowed_entity_types) - if len(self.allowed_entity_types) > 0 + if len(self.allowed_entity_types or []) > 0 else "No entity types provided, You are free to define them.", - allowed_relation_types=", ".join(self.allowed_relation_types) - if len(self.allowed_relation_types) > 0 + allowed_relation_types=", ".join(self.allowed_relation_types or []) + if len(self.allowed_relation_types or []) > 0 else "No relation types provided, You are free to define them.", allowed_entity_properties=", ".join(self.allowed_entity_props) if self.allowed_entity_props @@ -351,7 +350,7 @@ async def _aextract(self, node: BaseNode) -> BaseNode: Returns: BaseNode: The processed node with extracted information. """ - text = node.get_content(metadata_mode="llm") + text = node.get_content(metadata_mode=MetadataMode.LLM) try: if ( self.allowed_entity_props is not None @@ -384,7 +383,7 @@ async def _aextract(self, node: BaseNode) -> BaseNode: return node async def acall( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any ) -> List[BaseNode]: """ Asynchronously extract triples from multiple nodes. 
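The extractor signatures above (and in the files that follow) widen `nodes: List[BaseNode]` to `Sequence[BaseNode]`. The motivation is variance: `List` is invariant under mypy, so a caller holding a `List[TextNode]` cannot pass it where `List[BaseNode]` is expected, while the read-only `Sequence` is covariant and accepts it. A toy illustration, assuming nothing beyond the standard library:

from typing import List, Sequence


class Animal:
    pass


class Dog(Animal):
    pass


def count_invariant(xs: List[Animal]) -> int:
    return len(xs)


def count_covariant(xs: Sequence[Animal]) -> int:
    return len(xs)


dogs: List[Dog] = [Dog(), Dog()]
# count_invariant(dogs)  # mypy: incompatible type "List[Dog]"; List is invariant
print(count_covariant(dogs))  # OK: Sequence[Animal] accepts List[Dog]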
diff --git a/llama-index-core/llama_index/core/indices/property_graph/transformations/implicit.py b/llama-index-core/llama_index/core/indices/property_graph/transformations/implicit.py index 1ced2567bb667..e5dfecdf3c8ce 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/transformations/implicit.py +++ b/llama-index-core/llama_index/core/indices/property_graph/transformations/implicit.py @@ -1,5 +1,5 @@ from tqdm import tqdm -from typing import Any, List +from typing import Any, Sequence from llama_index.core.schema import TransformComponent, BaseNode, NodeRelationship from llama_index.core.graph_stores.types import Relation, KG_NODES_KEY, KG_RELATIONS_KEY @@ -20,8 +20,8 @@ def class_name(cls) -> str: return "ImplicitPathExtractor" def __call__( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any - ) -> List[BaseNode]: + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any + ) -> Sequence[BaseNode]: """Extract edges from node relationships.""" if show_progress: nodes = tqdm(nodes, desc="Extracting implicit paths") diff --git a/llama-index-core/llama_index/core/indices/property_graph/transformations/schema_llm.py b/llama-index-core/llama_index/core/indices/property_graph/transformations/schema_llm.py index 65ffbf6b64254..8489c2651508d 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/transformations/schema_llm.py +++ b/llama-index-core/llama_index/core/indices/property_graph/transformations/schema_llm.py @@ -1,11 +1,11 @@ import asyncio -from typing import Any, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, Dict, List, Literal, Optional, Sequence, Tuple, Union try: - from typing import TypeAlias + from typing import TypeAlias # type: ignore except ImportError: # python 3.8 and 3.9 compatibility - TypeAlias = Any + from typing import Any as TypeAlias # type: ignore from llama_index.core.async_utils import run_jobs from llama_index.core.bridge.pydantic import create_model, field_validator @@ -21,7 +21,7 @@ get_relation_class, ) from llama_index.core.prompts import PromptTemplate -from llama_index.core.schema import TransformComponent, BaseNode +from llama_index.core.schema import TransformComponent, BaseNode, MetadataMode from llama_index.core.llms.llm import LLM @@ -137,7 +137,7 @@ class SchemaLLMPathExtractor(TransformComponent): def __init__( self, llm: LLM, - extract_prompt: Union[PromptTemplate, str] = None, + extract_prompt: Optional[Union[PromptTemplate, str]] = None, possible_entities: Optional[TypeAlias] = None, possible_entity_props: Optional[Union[List[str], List[Tuple[str, str]]]] = None, possible_relations: Optional[TypeAlias] = None, @@ -146,7 +146,7 @@ def __init__( ] = None, strict: bool = True, kg_schema_cls: Any = None, - kg_validation_schema: Union[Dict[str, str], List[Triple]] = None, + kg_validation_schema: Optional[Union[Dict[str, str], List[Triple]]] = None, max_triplets_per_chunk: int = 10, num_workers: int = 4, ) -> None: @@ -158,24 +158,24 @@ def __init__( if kg_schema_cls is None: possible_entities = possible_entities or DEFAULT_ENTITIES if possible_entity_props and isinstance(possible_entity_props[0], tuple): - entity_props = [ + entity_props = [ # type: ignore f"Property label `{k}` with description ({v})" for k, v in possible_entity_props ] else: - entity_props = possible_entity_props + entity_props = possible_entity_props # type: ignore entity_cls = get_entity_class(possible_entities, entity_props, strict) possible_relations = possible_relations or 
DEFAULT_RELATIONS if possible_relation_props and isinstance( possible_relation_props[0], tuple ): - relation_props = [ + relation_props = [ # type: ignore f"Property label `{k}` with description ({v})" for k, v in possible_relation_props ] else: - relation_props = possible_relation_props + relation_props = possible_relation_props # type: ignore relation_cls = get_relation_class( possible_relations, relation_props, strict ) @@ -210,8 +210,8 @@ def validate(v: Any) -> Any: root = field_validator("triplets", mode="before")(validate) kg_schema_cls = create_model( "KGSchema", - __validators__={"validator1": root}, - triplets=(List[triplet_cls], ...), + __validators__={"validator1": root}, # type: ignore + triplets=(List[triplet_cls], ...), # type: ignore ) kg_schema_cls.__doc__ = "Knowledge Graph Schema." @@ -219,7 +219,7 @@ def validate(v: Any) -> Any: kg_validation_schema = kg_validation_schema or DEFAULT_VALIDATION_SCHEMA # TODO: Remove this in a future version & encourage List[Triple] for validation schema if isinstance(kg_validation_schema, list): - kg_validation_schema = {"relationships": kg_validation_schema} + kg_validation_schema = {"relationships": kg_validation_schema} # type: ignore # flatten tuples now that we don't need the descriptions if possible_relation_props and isinstance(possible_relation_props[0], tuple): @@ -245,13 +245,13 @@ def class_name(cls) -> str: return "SchemaLLMPathExtractor" def __call__( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any ) -> List[BaseNode]: """Extract triplets from nodes.""" return asyncio.run(self.acall(nodes, show_progress=show_progress, **kwargs)) def _prune_invalid_props( - self, props: Dict[str, Any], allowed_props: List[str] + self, props: Dict[str, Any], allowed_props: Optional[List[str]] ) -> Dict[str, Any]: """Prune invalid properties.""" if not allowed_props: @@ -267,7 +267,7 @@ def _prune_invalid_props( return props - def _prune_invalid_triplets(self, kg_schema: Any) -> List[Triplet]: + def _prune_invalid_triplets(self, kg_schema: Any) -> Sequence[Triplet]: """Prune invalid triplets.""" assert isinstance(kg_schema, self.kg_schema_cls) @@ -275,7 +275,7 @@ def _prune_invalid_triplets(self, kg_schema: Any) -> List[Triplet]: for triplet in kg_schema.triplets: subject = triplet.subject.name subject_type = triplet.subject.type - subject_props = {} + subject_props: Dict[str, Any] = {} if hasattr(triplet.subject, "properties"): subject_props = triplet.subject.properties or {} if self.strict: @@ -285,7 +285,7 @@ def _prune_invalid_triplets(self, kg_schema: Any) -> List[Triplet]: ) relation = triplet.relation.type - relation_props = {} + relation_props: Dict[str, Any] = {} if hasattr(triplet.relation, "properties"): relation_props = triplet.relation.properties or {} if self.strict: @@ -296,7 +296,7 @@ def _prune_invalid_triplets(self, kg_schema: Any) -> List[Triplet]: obj = triplet.object.name obj_type = triplet.object.type - obj_props = {} + obj_props: Dict[str, Any] = {} if hasattr(triplet.object, "properties"): obj_props = triplet.object.properties or {} if self.strict: @@ -349,7 +349,7 @@ async def _aextract(self, node: BaseNode) -> BaseNode: """Extract triplets from a node.""" assert hasattr(node, "text") - text = node.get_content(metadata_mode="llm") + text = node.get_content(metadata_mode=MetadataMode.LLM) try: kg_schema = await self.llm.astructured_predict( self.kg_schema_cls, @@ -380,7 +380,7 @@ async def _aextract(self, node: 
BaseNode) -> BaseNode: return node async def acall( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any ) -> List[BaseNode]: """Extract triplets from nodes async.""" jobs = [] diff --git a/llama-index-core/llama_index/core/indices/property_graph/transformations/simple_llm.py b/llama-index-core/llama_index/core/indices/property_graph/transformations/simple_llm.py index 130c3ca3d3d4d..3536dedf5075b 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/transformations/simple_llm.py +++ b/llama-index-core/llama_index/core/indices/property_graph/transformations/simple_llm.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any, List, Callable, Optional, Union +from typing import Any, Callable, Optional, Sequence, Union from llama_index.core.async_utils import run_jobs from llama_index.core.indices.property_graph.utils import ( @@ -16,7 +16,7 @@ from llama_index.core.prompts.default_prompts import ( DEFAULT_KG_TRIPLET_EXTRACT_PROMPT, ) -from llama_index.core.schema import TransformComponent, BaseNode +from llama_index.core.schema import TransformComponent, BaseNode, MetadataMode class SimpleLLMPathExtractor(TransformComponent): @@ -70,8 +70,8 @@ def class_name(cls) -> str: return "SimpleLLMPathExtractor" def __call__( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any - ) -> List[BaseNode]: + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any + ) -> Sequence[BaseNode]: """Extract triples from nodes.""" return asyncio.run(self.acall(nodes, show_progress=show_progress, **kwargs)) @@ -79,7 +79,7 @@ async def _aextract(self, node: BaseNode) -> BaseNode: """Extract triples from a node.""" assert hasattr(node, "text") - text = node.get_content(metadata_mode="llm") + text = node.get_content(metadata_mode=MetadataMode.LLM) try: llm_response = await self.llm.apredict( self.extract_prompt, @@ -113,8 +113,8 @@ async def _aextract(self, node: BaseNode) -> BaseNode: return node async def acall( - self, nodes: List[BaseNode], show_progress: bool = False, **kwargs: Any - ) -> List[BaseNode]: + self, nodes: Sequence[BaseNode], show_progress: bool = False, **kwargs: Any + ) -> Sequence[BaseNode]: """Extract triples from nodes async.""" jobs = [] for node in nodes: diff --git a/llama-index-core/llama_index/core/indices/property_graph/transformations/utils.py b/llama-index-core/llama_index/core/indices/property_graph/transformations/utils.py index a02c3db43a532..5cf9b441310f1 100644 --- a/llama-index-core/llama_index/core/indices/property_graph/transformations/utils.py +++ b/llama-index-core/llama_index/core/indices/property_graph/transformations/utils.py @@ -1,10 +1,10 @@ from typing import Any, Dict, List, Optional try: - from typing import TypeAlias + from typing import TypeAlias # type: ignore except ImportError: # python 3.8 and 3.9 compatibility - TypeAlias = Any + from typing import Any as TypeAlias # type: ignore from llama_index.core.bridge.pydantic import create_model, Field diff --git a/llama-index-core/llama_index/core/indices/query/embedding_utils.py b/llama-index-core/llama_index/core/indices/query/embedding_utils.py index 7b83ca53d40d5..be95c556916de 100644 --- a/llama-index-core/llama_index/core/indices/query/embedding_utils.py +++ b/llama-index-core/llama_index/core/indices/query/embedding_utils.py @@ -27,7 +27,7 @@ def get_top_k_embeddings( similarity_heap: List[Tuple[float, Any]] = [] for i, emb in enumerate(embeddings_np): - 
similarity = similarity_fn(query_embedding_np, emb) + similarity = similarity_fn(query_embedding_np, emb) # type: ignore[arg-type] if similarity_cutoff is None or similarity > similarity_cutoff: heapq.heappush(similarity_heap, (similarity, embedding_ids[i])) if similarity_top_k and len(similarity_heap) > similarity_top_k: diff --git a/llama-index-core/llama_index/core/indices/query/query_transform/base.py b/llama-index-core/llama_index/core/indices/query/query_transform/base.py index 3d734a5dec7cb..128709818e537 100644 --- a/llama-index-core/llama_index/core/indices/query/query_transform/base.py +++ b/llama-index-core/llama_index/core/indices/query/query_transform/base.py @@ -183,7 +183,7 @@ def __init__( """Init params.""" super().__init__() self._llm = llm or Settings.llm - self._decompose_query_prompt = ( + self._decompose_query_prompt: BasePromptTemplate = ( decompose_query_prompt or DEFAULT_DECOMPOSE_QUERY_TRANSFORM_PROMPT ) self.verbose = verbose @@ -244,7 +244,9 @@ def __init__( augmenting query with image output instructions. """ self._width = width - self._query_prompt = query_prompt or DEFAULT_IMAGE_OUTPUT_PROMPT + self._query_prompt: BasePromptTemplate = ( + query_prompt or DEFAULT_IMAGE_OUTPUT_PROMPT + ) def _get_prompts(self) -> PromptDictType: """Get prompts.""" @@ -288,7 +290,7 @@ def __init__( """Init params.""" super().__init__() self._llm = llm or Settings.llm - self._step_decompose_query_prompt = ( + self._step_decompose_query_prompt: BasePromptTemplate = ( step_decompose_query_prompt or DEFAULT_STEP_DECOMPOSE_QUERY_TRANSFORM_PROMPT ) self.verbose = verbose diff --git a/llama-index-core/llama_index/core/indices/registry.py b/llama-index-core/llama_index/core/indices/registry.py index 2f2097d59a44f..0366bc32c703d 100644 --- a/llama-index-core/llama_index/core/indices/registry.py +++ b/llama-index-core/llama_index/core/indices/registry.py @@ -22,7 +22,7 @@ IndexStructType.KEYWORD_TABLE: KeywordTableIndex, IndexStructType.VECTOR_STORE: VectorStoreIndex, IndexStructType.SQL: SQLStructStoreIndex, - IndexStructType.PANDAS: PandasIndex, + IndexStructType.PANDAS: PandasIndex, # type: ignore IndexStructType.KG: KnowledgeGraphIndex, IndexStructType.SIMPLE_LPG: PropertyGraphIndex, IndexStructType.EMPTY: EmptyIndex, diff --git a/llama-index-core/llama_index/core/indices/struct_store/json_query.py b/llama-index-core/llama_index/core/indices/struct_store/json_query.py index 26ede4714eb53..0b8164a5c3620 100644 --- a/llama-index-core/llama_index/core/indices/struct_store/json_query.py +++ b/llama-index-core/llama_index/core/indices/struct_store/json_query.py @@ -42,7 +42,7 @@ def default_output_response_parser(llm_output: str) -> str: """Attempts to parse the JSON path prompt output. 
Only applicable if the default prompt is used.""" try: - llm_output_parsed = re.search( + llm_output_parsed = re.search( # type: ignore pattern=r"JSONPath:\s+(.*)", string=llm_output ).groups()[0] except Exception: @@ -56,7 +56,7 @@ def default_output_response_parser(llm_output: str) -> str: return llm_output_parsed -def default_output_processor(llm_output: str, json_value: JSONType) -> JSONType: +def default_output_processor(llm_output: str, json_value: JSONType) -> Dict[str, str]: """Default output processor that extracts values based on JSON Path expressions.""" # Post-process the LLM output to remove the JSONPath: prefix llm_output = llm_output.replace("JSONPath: ", "").replace("JSON Path: ", "").strip() @@ -71,7 +71,7 @@ def default_output_processor(llm_output: str, json_value: JSONType) -> JSONType: IMPORT_ERROR_MSG = "You need to install jsonpath-ng to use this function!" raise ImportError(IMPORT_ERROR_MSG) from exc - results = {} + results: Dict[str, str] = {} for expression in expressions: try: diff --git a/llama-index-core/llama_index/core/indices/struct_store/sql.py b/llama-index-core/llama_index/core/indices/struct_store/sql.py index 6aae76c0f7597..aff78f41f70f2 100644 --- a/llama-index-core/llama_index/core/indices/struct_store/sql.py +++ b/llama-index-core/llama_index/core/indices/struct_store/sql.py @@ -102,7 +102,9 @@ def __init__( def ref_doc_id_column(self) -> Optional[str]: return self._ref_doc_id_column - def _build_index_from_nodes(self, nodes: Sequence[BaseNode]) -> SQLStructTable: + def _build_index_from_nodes( + self, nodes: Sequence[BaseNode], **build_kwargs: Any + ) -> SQLStructTable: """Build index from nodes.""" index_struct = self.index_struct_cls() if len(nodes) == 0: diff --git a/llama-index-core/llama_index/core/indices/struct_store/sql_query.py b/llama-index-core/llama_index/core/indices/struct_store/sql_query.py index be92c9ae2a647..1cf82413fd1f8 100644 --- a/llama-index-core/llama_index/core/indices/struct_store/sql_query.py +++ b/llama-index-core/llama_index/core/indices/struct_store/sql_query.py @@ -402,7 +402,7 @@ def _query(self, query_bundle: QueryBundle) -> RESPONSE_TYPE: return cast(StreamingResponse, response) return cast(Response, response) else: - response_str = "\n".join([node.node.text for node in retrieved_nodes]) + response_str = "\n".join([node.text for node in retrieved_nodes]) return Response(response=response_str, metadata=metadata) async def _aquery(self, query_bundle: QueryBundle) -> RESPONSE_TYPE: @@ -434,7 +434,7 @@ async def _aquery(self, query_bundle: QueryBundle) -> RESPONSE_TYPE: return cast(AsyncStreamingResponse, response) return cast(Response, response) else: - response_str = "\n".join([node.node.text for node in retrieved_nodes]) + response_str = "\n".join([node.text for node in retrieved_nodes]) return Response(response=response_str, metadata=metadata) diff --git a/llama-index-core/llama_index/core/indices/tree/base.py b/llama-index-core/llama_index/core/indices/tree/base.py index fa5025d4d2e24..13679b1e1a2cb 100644 --- a/llama-index-core/llama_index/core/indices/tree/base.py +++ b/llama-index-core/llama_index/core/indices/tree/base.py @@ -134,7 +134,9 @@ def _validate_build_tree_required(self, retriever_mode: TreeRetrieverMode) -> No f"but retriever mode {retriever_mode} requires trees." 
) - def _build_index_from_nodes(self, nodes: Sequence[BaseNode]) -> IndexGraph: + def _build_index_from_nodes( + self, nodes: Sequence[BaseNode], **build_kwargs: Any + ) -> IndexGraph: """Build the index from nodes.""" index_builder = GPTTreeIndexBuilder( self.num_children, diff --git a/llama-index-core/llama_index/core/indices/tree/select_leaf_retriever.py b/llama-index-core/llama_index/core/indices/tree/select_leaf_retriever.py index 1aa59c01d1f89..98ad03e4b42ed 100644 --- a/llama-index-core/llama_index/core/indices/tree/select_leaf_retriever.py +++ b/llama-index-core/llama_index/core/indices/tree/select_leaf_retriever.py @@ -133,7 +133,7 @@ def _query_with_selected_node( cur_response = response_builder.get_response( query_str, [node_text], prev_response=prev_response ) - cur_response = cast(str, cur_response) + cur_response = str(cur_response) logger.debug(f">[Level {level}] Current answer response: {cur_response} ") else: cur_response = self._query_level( @@ -154,7 +154,7 @@ def _query_with_selected_node( ) logger.debug(f">[Level {level}] Current refined response: {cur_response} ") - return cur_response + return str(cur_response) def _query_level( self, diff --git a/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/auto_retriever.py b/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/auto_retriever.py index 7af51bd93e110..7ee71b3fb6db3 100644 --- a/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/auto_retriever.py +++ b/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/auto_retriever.py @@ -19,7 +19,7 @@ OutputParserException, StructuredOutput, ) -from llama_index.core.prompts.base import PromptTemplate +from llama_index.core.prompts.base import BasePromptTemplate, PromptTemplate from llama_index.core.prompts.mixin import PromptDictType from llama_index.core.schema import IndexNode, QueryBundle from llama_index.core.settings import Settings @@ -95,7 +95,7 @@ def __init__( prompt_template_str or DEFAULT_VECTOR_STORE_QUERY_PROMPT_TMPL ) self._output_parser = VectorStoreQueryOutputParser() - self._prompt = PromptTemplate(template=prompt_template_str) + self._prompt: BasePromptTemplate = PromptTemplate(template=prompt_template_str) # additional config self._max_top_k = max_top_k @@ -190,7 +190,7 @@ async def agenerate_retrieval_spec( # parse output return self._parse_generated_spec(output, query_bundle) - def _build_retriever_from_spec( + def _build_retriever_from_spec( # type: ignore self, spec: VectorStoreQuerySpec ) -> Tuple[BaseRetriever, QueryBundle]: # construct new query bundle from query_spec diff --git a/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py b/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py index 3232b9c06a973..9eb68ae496b54 100644 --- a/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py +++ b/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py @@ -155,9 +155,9 @@ def _build_node_list_from_query_result( ): node_id = query_result.nodes[i].node_id if self._docstore.document_exists(node_id): - query_result.nodes[i] = self._docstore.get_node( + query_result.nodes[i] = self._docstore.get_node( # type: ignore node_id - ) # type: ignore[index] + ) log_vector_store_query_result(query_result) diff --git a/llama-index-core/llama_index/core/ingestion/cache.py b/llama-index-core/llama_index/core/ingestion/cache.py index 
3ba238c8d0528..2e0420d1853ff 100644 --- a/llama-index-core/llama_index/core/ingestion/cache.py +++ b/llama-index-core/llama_index/core/ingestion/cache.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional, Sequence import fsspec from llama_index.core.bridge.pydantic import BaseModel, Field, ConfigDict @@ -25,7 +25,7 @@ class IngestionCache(BaseModel): # TODO: add async get/put methods? def put( - self, key: str, nodes: List[BaseNode], collection: Optional[str] = None + self, key: str, nodes: Sequence[BaseNode], collection: Optional[str] = None ) -> None: """Put a value into the cache.""" collection = collection or self.collection @@ -35,7 +35,7 @@ def put( def get( self, key: str, collection: Optional[str] = None - ) -> Optional[List[BaseNode]]: + ) -> Optional[Sequence[BaseNode]]: """Get a value from the cache.""" collection = collection or self.collection node_dicts = self.cache.get(key, collection=collection) diff --git a/llama-index-core/llama_index/core/ingestion/data_sinks.py b/llama-index-core/llama_index/core/ingestion/data_sinks.py index 50dd9bb757325..d64c275e2e3eb 100644 --- a/llama-index-core/llama_index/core/ingestion/data_sinks.py +++ b/llama-index-core/llama_index/core/ingestion/data_sinks.py @@ -22,38 +22,38 @@ class DataSink(BaseModel): ) -def build_conifurable_data_sink_enum(): - """ - Build an enum of configurable data sinks. - But conditional on if the corresponding vector store is available. - """ +class ConfigurableComponent(Enum): + @classmethod + def from_component( + cls, component: BasePydanticVectorStore + ) -> "ConfigurableComponent": + component_class = type(component) + for component_type in cls: + if component_type.value.component_type == component_class: + return component_type + raise ValueError( + f"Component {component} is not a supported data sink component." + ) - class ConfigurableComponent(Enum): - @classmethod - def from_component( - cls, component: BasePydanticVectorStore - ) -> "ConfigurableDataSinks": - component_class = type(component) - for component_type in cls: - if component_type.value.component_type == component_class: - return component_type + def build_configured_data_sink( + self, component: BasePydanticVectorStore + ) -> "ConfiguredDataSink": + component_type = self.value.component_type + if not isinstance(component, component_type): raise ValueError( - f"Component {component} is not a supported data sink component." + f"The enum value {self} is not compatible with component of " + f"type {type(component)}" ) + return ConfiguredDataSink[component_type]( # type: ignore + component=component, name=self.value.name + ) - def build_configured_data_sink( - self, component: BasePydanticVectorStore - ) -> "ConfiguredDataSink": - component_type = self.value.component_type - if not isinstance(component, component_type): - raise ValueError( - f"The enum value {self} is not compatible with component of " - f"type {type(component)}" - ) - return ConfiguredDataSink[component_type]( # type: ignore - component=component, name=self.value.name - ) +def build_conifurable_data_sink_enum() -> ConfigurableComponent: + """ + Build an enum of configurable data sinks. + But conditional on if the corresponding vector store is available. 
+ """ enum_members = [] try: @@ -141,7 +141,7 @@ def build_configured_data_sink( except (ImportError, ValidationError): pass - return ConfigurableComponent("ConfigurableDataSinks", enum_members) + return ConfigurableComponent("ConfigurableDataSinks", enum_members) # type: ignore ConfigurableDataSinks = build_conifurable_data_sink_enum() @@ -176,5 +176,5 @@ def from_component(cls, component: BasePydanticVectorStore) -> "ConfiguredDataSi ).build_configured_data_sink(component) @property - def configurable_data_sink_type(self) -> ConfigurableDataSinks: + def configurable_data_sink_type(self) -> ConfigurableDataSinks: # type: ignore return ConfigurableDataSinks.from_component(self.component) diff --git a/llama-index-core/llama_index/core/ingestion/data_sources.py b/llama-index-core/llama_index/core/ingestion/data_sources.py index 76e1283a617c3..bb7cb6fc96ec6 100644 --- a/llama-index-core/llama_index/core/ingestion/data_sources.py +++ b/llama-index-core/llama_index/core/ingestion/data_sources.py @@ -48,51 +48,51 @@ def lazy_load_data(self, *args: Any, **load_kwargs: Any) -> Iterable[Document]: return self.documents -def build_configurable_data_source_enum(): - """ - Build an enum of configurable data sources. - But conditional on if the corresponding reader is available. - """ +class ConfigurableComponent(Enum): + @classmethod + def from_component(cls, component: BaseComponent) -> "ConfigurableComponent": + component_class = type(component) + for component_type in cls: + if component_type.value.component_type == component_class: + return component_type + raise ValueError( + f"Component {component} is not a supported data source component." + ) - class ConfigurableComponent(Enum): - @classmethod - def from_component(cls, component: BaseComponent) -> "ConfigurableDataSources": - component_class = type(component) - for component_type in cls: - if component_type.value.component_type == component_class: - return component_type + def build_configured_data_source( + self, component: BaseComponent, name: Optional[str] = None + ) -> "ConfiguredDataSource": + component_type = self.value.component_type + if not isinstance(component, component_type): raise ValueError( - f"Component {component} is not a supported data source component." 
+ f"The enum value {self} is not compatible with component of " + f"type {type(component)}" ) + elif isinstance(component, BasePydanticReader): + reader_config = ReaderConfig(reader=component) + return ConfiguredDataSource[ReaderConfig]( + component=reader_config + ) # type: ignore + + if isinstance(component, DocumentGroup) and name is None: + # if the component is a DocumentGroup, we want to use the + # full file path as the name of the data source + component = cast(DocumentGroup, component) + name = component.file_path + + if name is None: + suffix = uuid.uuid1() + name = self.value.name + f" [{suffix}]]" + return ConfiguredDataSource[component_type]( # type: ignore + component=component, name=name + ) - def build_configured_data_source( - self, component: BaseComponent, name: Optional[str] = None - ) -> "ConfiguredDataSource": - component_type = self.value.component_type - if not isinstance(component, component_type): - raise ValueError( - f"The enum value {self} is not compatible with component of " - f"type {type(component)}" - ) - elif isinstance(component, BasePydanticReader): - reader_config = ReaderConfig(loader=component) - return ConfiguredDataSource[ReaderConfig]( - component=reader_config - ) # type: ignore - - if isinstance(component, DocumentGroup) and name is None: - # if the component is a DocumentGroup, we want to use the - # full file path as the name of the data source - component = cast(DocumentGroup, component) - name = component.file_path - - if name is None: - suffix = uuid.uuid1() - name = self.value.name + f" [{suffix}]]" - return ConfiguredDataSource[component_type]( # type: ignore - component=component, name=name - ) +def build_configurable_data_source_enum() -> ConfigurableComponent: + """ + Build an enum of configurable data sources. + But conditional on if the corresponding reader is available. + """ enum_members = [] try: @@ -434,7 +434,7 @@ def build_configured_data_source( ) ) - return ConfigurableComponent("ConfigurableDataSources", enum_members) + return ConfigurableComponent("ConfigurableDataSources", enum_members) # type: ignore ConfigurableDataSources = build_configurable_data_source_enum() @@ -472,5 +472,5 @@ def from_component( ).build_configured_data_source(component, name) @property - def configurable_data_source_type(self) -> ConfigurableDataSources: + def configurable_data_source_type(self) -> ConfigurableComponent: return ConfigurableDataSources.from_component(self.component) diff --git a/llama-index-core/llama_index/core/ingestion/pipeline.py b/llama-index-core/llama_index/core/ingestion/pipeline.py index c1dbe5dc2e42c..c932b28bf18ed 100644 --- a/llama-index-core/llama_index/core/ingestion/pipeline.py +++ b/llama-index-core/llama_index/core/ingestion/pipeline.py @@ -53,7 +53,7 @@ def remove_unstable_values(s: str) -> str: def get_transformation_hash( - nodes: List[BaseNode], transformation: TransformComponent + nodes: Sequence[BaseNode], transformation: TransformComponent ) -> str: """Get the hash of a transformation.""" nodes_str = "".join( @@ -67,13 +67,13 @@ def get_transformation_hash( def run_transformations( - nodes: List[BaseNode], + nodes: Sequence[BaseNode], transformations: Sequence[TransformComponent], in_place: bool = True, cache: Optional[IngestionCache] = None, cache_collection: Optional[str] = None, **kwargs: Any, -) -> List[BaseNode]: +) -> Sequence[BaseNode]: """ Run a series of transformations on a set of nodes. 
@@ -103,13 +103,13 @@ def run_transformations( async def arun_transformations( - nodes: List[BaseNode], + nodes: Sequence[BaseNode], transformations: Sequence[TransformComponent], in_place: bool = True, cache: Optional[IngestionCache] = None, cache_collection: Optional[str] = None, **kwargs: Any, -) -> List[BaseNode]: +) -> Sequence[BaseNode]: """ Run a series of transformations on a set of nodes. @@ -140,13 +140,13 @@ async def arun_transformations( def arun_transformations_wrapper( - nodes: List[BaseNode], + nodes: Sequence[BaseNode], transformations: Sequence[TransformComponent], in_place: bool = True, cache: Optional[IngestionCache] = None, cache_collection: Optional[str] = None, **kwargs: Any, -) -> List[BaseNode]: +) -> Sequence[BaseNode]: """ Wrapper for async run_transformation. To be used in loop.run_in_executor within a ProcessPoolExecutor. @@ -352,29 +352,32 @@ def _get_default_transformations(self) -> List[TransformComponent]: ] def _prepare_inputs( - self, documents: Optional[List[Document]], nodes: Optional[List[BaseNode]] - ) -> List[Document]: - input_nodes: List[BaseNode] = [] + self, + documents: Optional[Sequence[Document]], + nodes: Optional[Sequence[BaseNode]], + ) -> Sequence[BaseNode]: + input_nodes: Sequence[BaseNode] = [] + if documents is not None: - input_nodes += documents + input_nodes += documents # type: ignore if nodes is not None: - input_nodes += nodes + input_nodes += nodes # type: ignore if self.documents is not None: - input_nodes += self.documents + input_nodes += self.documents # type: ignore if self.readers is not None: for reader in self.readers: - input_nodes += reader.read() + input_nodes += reader.read() # type: ignore return input_nodes def _handle_duplicates( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], store_doc_text: bool = True, - ) -> List[BaseNode]: + ) -> Sequence[BaseNode]: """Handle docstore duplicates by checking all hashes.""" assert self.docstore is not None @@ -393,9 +396,9 @@ def _handle_duplicates( def _handle_upserts( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], store_doc_text: bool = True, - ) -> List[BaseNode]: + ) -> Sequence[BaseNode]: """Handle docstore upserts by checking hashes and ids.""" assert self.docstore is not None @@ -438,8 +441,8 @@ def _handle_upserts( @staticmethod def _node_batcher( - num_batches: int, nodes: Union[List[BaseNode], List[Document]] - ) -> Generator[Union[List[BaseNode], List[Document]], Any, Any]: + num_batches: int, nodes: Union[Sequence[BaseNode], List[Document]] + ) -> Generator[Union[Sequence[BaseNode], List[Document]], Any, Any]: """Yield successive n-sized chunks from lst.""" batch_size = max(1, int(len(nodes) / num_batches)) for i in range(0, len(nodes), batch_size): @@ -450,7 +453,7 @@ def run( self, show_progress: bool = False, documents: Optional[List[Document]] = None, - nodes: Optional[List[BaseNode]] = None, + nodes: Optional[Sequence[BaseNode]] = None, cache_collection: Optional[str] = None, in_place: bool = True, store_doc_text: bool = True, @@ -467,7 +470,7 @@ def run( Args: show_progress (bool, optional): Shows execution progress bar(s). Defaults to False. documents (Optional[List[Document]], optional): Set of documents to be transformed. Defaults to None. - nodes (Optional[List[BaseNode]], optional): Set of nodes to be transformed. Defaults to None. + nodes (Optional[Sequence[BaseNode]], optional): Set of nodes to be transformed. Defaults to None. cache_collection (Optional[str], optional): Cache for transformations. Defaults to None. 
in_place (bool, optional): Whether transformations creates a new list for transformed nodes or modifies the array passed to `run_transformations`. Defaults to True. @@ -535,7 +538,7 @@ def run( repeat(cache_collection), ), ) - nodes = reduce(lambda x, y: x + y, nodes_parallel, []) + nodes = reduce(lambda x, y: x + y, nodes_parallel, []) # type: ignore else: nodes = run_transformations( nodes_to_run, @@ -547,6 +550,8 @@ def run( **kwargs, ) + nodes = nodes or [] + if self.vector_store is not None: nodes_with_embeddings = [n for n in nodes if n.embedding is not None] if nodes_with_embeddings: @@ -557,9 +562,9 @@ def run( # ------ async methods ------ async def _ahandle_duplicates( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], store_doc_text: bool = True, - ) -> List[BaseNode]: + ) -> Sequence[BaseNode]: """Handle docstore duplicates by checking all hashes.""" assert self.docstore is not None @@ -578,9 +583,9 @@ async def _ahandle_duplicates( async def _ahandle_upserts( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], store_doc_text: bool = True, - ) -> List[BaseNode]: + ) -> Sequence[BaseNode]: """Handle docstore upserts by checking hashes and ids.""" assert self.docstore is not None @@ -626,7 +631,7 @@ async def arun( self, show_progress: bool = False, documents: Optional[List[Document]] = None, - nodes: Optional[List[BaseNode]] = None, + nodes: Optional[Sequence[BaseNode]] = None, cache_collection: Optional[str] = None, in_place: bool = True, store_doc_text: bool = True, @@ -643,7 +648,7 @@ async def arun( Args: show_progress (bool, optional): Shows execution progress bar(s). Defaults to False. documents (Optional[List[Document]], optional): Set of documents to be transformed. Defaults to None. - nodes (Optional[List[BaseNode]], optional): Set of nodes to be transformed. Defaults to None. + nodes (Optional[Sequence[BaseNode]], optional): Set of nodes to be transformed. Defaults to None. cache_collection (Optional[str], optional): Cache for transformations. Defaults to None. in_place (bool, optional): Whether transformations creates a new list for transformed nodes or modifies the array passed to `run_transformations`. Defaults to True. @@ -716,10 +721,10 @@ async def arun( ) for batch in node_batches ] - result: List[List[BaseNode]] = await asyncio.gather(*tasks) - nodes = reduce(lambda x, y: x + y, result, []) + result: Sequence[Sequence[BaseNode]] = await asyncio.gather(*tasks) + nodes: Sequence[BaseNode] = reduce(lambda x, y: x + y, result, []) # type: ignore else: - nodes = await arun_transformations( + nodes = await arun_transformations( # type: ignore nodes_to_run, self.transformations, show_progress=show_progress, @@ -728,6 +733,9 @@ async def arun( in_place=in_place, **kwargs, ) + nodes = nodes + + nodes = nodes or [] if self.vector_store is not None: nodes_with_embeddings = [n for n in nodes if n.embedding is not None] diff --git a/llama-index-core/llama_index/core/ingestion/transformations.py b/llama-index-core/llama_index/core/ingestion/transformations.py index 92da0d27da36a..d8620e55977dc 100644 --- a/llama-index-core/llama_index/core/ingestion/transformations.py +++ b/llama-index-core/llama_index/core/ingestion/transformations.py @@ -90,38 +90,36 @@ class ConfigurableTransformation(BaseModel): ) -def build_configurable_transformation_enum(): - """ - Build an enum of configurable transformations. - But conditional on if the corresponding component is available. 
- """ +class ConfigurableComponent(Enum): + @classmethod + def from_component(cls, component: BaseComponent) -> "ConfigurableComponent": + component_class = type(component) + for component_type in cls: + if component_type.value.component_type == component_class: + return component_type + raise ValueError( + f"Component {component} is not a supported transformation component." + ) - class ConfigurableComponent(Enum): - @classmethod - def from_component( - cls, component: BaseComponent - ) -> "ConfigurableTransformations": - component_class = type(component) - for component_type in cls: - if component_type.value.component_type == component_class: - return component_type + def build_configured_transformation( + self, component: BaseComponent + ) -> "ConfiguredTransformation": + component_type = self.value.component_type + if not isinstance(component, component_type): raise ValueError( - f"Component {component} is not a supported transformation component." + f"The enum value {self} is not compatible with component of " + f"type {type(component)}" ) + return ConfiguredTransformation[component_type]( # type: ignore + component=component, name=self.value.name + ) - def build_configured_transformation( - self, component: BaseComponent - ) -> "ConfiguredTransformation": - component_type = self.value.component_type - if not isinstance(component, component_type): - raise ValueError( - f"The enum value {self} is not compatible with component of " - f"type {type(component)}" - ) - return ConfiguredTransformation[component_type]( # type: ignore - component=component, name=self.value.name - ) +def build_configurable_transformation_enum() -> ConfigurableComponent: + """ + Build an enum of configurable transformations. + But conditional on if the corresponding component is available. 
+ """ enum_members = [] # Node parsers @@ -338,7 +336,7 @@ def build_configured_transformation( except (ImportError, ValidationError): pass - return ConfigurableComponent("ConfigurableTransformations", enum_members) + return ConfigurableComponent("ConfigurableTransformations", enum_members) # type: ignore ConfigurableTransformations = build_configurable_transformation_enum() @@ -376,5 +374,5 @@ def from_component(cls, component: BaseComponent) -> "ConfiguredTransformation": ).build_configured_transformation(component) @property - def configurable_transformation_type(self) -> ConfigurableTransformations: + def configurable_transformation_type(self) -> ConfigurableComponent: return ConfigurableTransformations.from_component(self.component) diff --git a/llama-index-core/llama_index/core/instrumentation/base_handler.py b/llama-index-core/llama_index/core/instrumentation/base_handler.py index 813a301ed3137..3b9bb27b7cdd6 100644 --- a/llama-index-core/llama_index/core/instrumentation/base_handler.py +++ b/llama-index-core/llama_index/core/instrumentation/base_handler.py @@ -4,5 +4,5 @@ class BaseInstrumentationHandler(ABC): @classmethod @abstractmethod - def init(cls): + def init(cls) -> None: """Initialize the instrumentation handler.""" diff --git a/llama-index-core/llama_index/core/instrumentation/dispatcher.py b/llama-index-core/llama_index/core/instrumentation/dispatcher.py index caf8477830060..8e892e17162aa 100644 --- a/llama-index-core/llama_index/core/instrumentation/dispatcher.py +++ b/llama-index-core/llama_index/core/instrumentation/dispatcher.py @@ -1,6 +1,6 @@ from contextlib import contextmanager from contextvars import ContextVar, Token -from typing import Any, List, Optional, Dict, Protocol +from typing import Any, Callable, Generator, List, Optional, Dict, Protocol import inspect import uuid from deprecated import deprecated @@ -19,11 +19,13 @@ # ContextVar for managing active instrument tags -active_instrument_tags = ContextVar("instrument_tags", default={}) +active_instrument_tags: ContextVar[Dict[str, Any]] = ContextVar( + "instrument_tags", default={} +) @contextmanager -def instrument_tags(new_tags): +def instrument_tags(new_tags: Dict[str, Any]) -> Generator[None, None, None]: token = active_instrument_tags.set(new_tags) try: yield @@ -33,7 +35,7 @@ def instrument_tags(new_tags): # Keep for backwards compatibility class EventDispatcher(Protocol): - def __call__(self, event: BaseEvent, **kwargs) -> None: + def __call__(self, event: BaseEvent, **kwargs: Any) -> None: ... 
@@ -98,10 +100,12 @@ def __init__( @property def parent(self) -> "Dispatcher": + assert self.manager is not None return self.manager.dispatchers[self.parent_name] @property def root(self) -> "Dispatcher": + assert self.manager is not None return self.manager.dispatchers[self.root_name] def add_event_handler(self, handler: BaseEventHandler) -> None: @@ -112,9 +116,9 @@ def add_span_handler(self, handler: BaseSpanHandler) -> None: """Add handler to set of handlers.""" self.span_handlers += [handler] - def event(self, event: BaseEvent, **kwargs) -> None: + def event(self, event: BaseEvent, **kwargs: Any) -> None: """Dispatch event to all registered handlers.""" - c = self + c: Optional["Dispatcher"] = self # Attach tags from the active context event.tags.update(active_instrument_tags.get()) @@ -158,7 +162,7 @@ def span_enter( **kwargs: Any, ) -> None: """Send notice to handlers that a span with id_ has started.""" - c = self + c: Optional["Dispatcher"] = self while c: for h in c.span_handlers: try: @@ -186,7 +190,7 @@ def span_drop( **kwargs: Any, ) -> None: """Send notice to handlers that a span with id_ is being dropped.""" - c = self + c: Optional["Dispatcher"] = self while c: for h in c.span_handlers: try: @@ -213,7 +217,7 @@ def span_exit( **kwargs: Any, ) -> None: """Send notice to handlers that a span with id_ is exiting.""" - c = self + c: Optional["Dispatcher"] = self while c: for h in c.span_handlers: try: @@ -231,7 +235,7 @@ def span_exit( else: c = c.parent - def span(self, func): + def span(self, func: Callable) -> Any: # The `span` decorator should be idempotent. try: if hasattr(func, DISPATCHER_SPAN_DECORATED_ATTR): @@ -243,7 +247,7 @@ def span(self, func): pass @wrapt.decorator - def wrapper(func, instance, args, kwargs): + def wrapper(func: Callable, instance: Any, args: list, kwargs: dict) -> Any: bound_args = inspect.signature(func).bind(*args, **kwargs) id_ = f"{func.__qualname__}-{uuid.uuid4()}" tags = active_instrument_tags.get() @@ -273,7 +277,9 @@ def wrapper(func, instance, args, kwargs): active_span_id.reset(token) @wrapt.decorator - async def async_wrapper(func, instance, args, kwargs): + async def async_wrapper( + func: Callable, instance: Any, args: list, kwargs: dict + ) -> Any: bound_args = inspect.signature(func).bind(*args, **kwargs) id_ = f"{func.__qualname__}-{uuid.uuid4()}" tags = active_instrument_tags.get() diff --git a/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py b/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py index 8377ff24fd896..4eb634c6ab081 100644 --- a/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py +++ b/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py @@ -15,5 +15,5 @@ def class_name(cls) -> str: return "BaseEventHandler" @abstractmethod - def handle(self, event: BaseEvent, **kwargs) -> Any: + def handle(self, event: BaseEvent, **kwargs: Any) -> Any: """Logic for handling event.""" diff --git a/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py b/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py index bd1025c54637e..07ccc408f1667 100644 --- a/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py +++ b/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py @@ -9,6 +9,6 @@ def class_name(cls) -> str: """Class name.""" return "NullEventHandler" - def handle(self, event: BaseEvent, **kwargs) -> Any: + def handle(self, event: BaseEvent, **kwargs: Any) -> Any: """Handle 
logic - null handler does nothing.""" return diff --git a/llama-index-core/llama_index/core/instrumentation/events/agent.py b/llama-index-core/llama_index/core/instrumentation/events/agent.py index 73e1c30a608c8..4c85cf46effd7 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/agent.py +++ b/llama-index-core/llama_index/core/instrumentation/events/agent.py @@ -25,7 +25,7 @@ class AgentRunStepStartEvent(BaseEvent): input: Optional[str] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "AgentRunStepStartEvent" @@ -40,7 +40,7 @@ class AgentRunStepEndEvent(BaseEvent): step_output: TaskStepOutput @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "AgentRunStepEndEvent" @@ -55,7 +55,7 @@ class AgentChatWithStepStartEvent(BaseEvent): user_msg: str @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "AgentChatWithStepStartEvent" @@ -100,7 +100,7 @@ def validate_response_type(cls: Any, response: Any) -> Any: return response @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "AgentChatWithStepEndEvent" @@ -117,6 +117,6 @@ class AgentToolCallEvent(BaseEvent): tool: ToolMetadata @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "AgentToolCallEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/base.py b/llama-index-core/llama_index/core/instrumentation/events/base.py index add0bac2a1aa3..5a2fbc4dce77c 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/base.py +++ b/llama-index-core/llama_index/core/instrumentation/events/base.py @@ -12,12 +12,12 @@ class BaseEvent(BaseModel): # copy_on_model_validation = "deep" # not supported in Pydantic V2... 
) timestamp: datetime = Field(default_factory=lambda: datetime.now()) - id_: str = Field(default_factory=lambda: uuid4()) + id_: str = Field(default_factory=lambda: str(uuid4())) span_id: Optional[str] = Field(default_factory=active_span_id.get) tags: Dict[str, Any] = Field(default={}) @classmethod - def class_name(cls): + def class_name(cls) -> str: """Return class name.""" return "BaseEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py b/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py index 2ec5d95d52df0..2061ad0070185 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py +++ b/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py @@ -8,7 +8,7 @@ class StreamChatStartEvent(BaseEvent): """ @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "StreamChatStartEvent" @@ -20,7 +20,7 @@ class StreamChatEndEvent(BaseEvent): """ @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "StreamChatEndEvent" @@ -37,7 +37,7 @@ class StreamChatErrorEvent(BaseEvent): exception: Exception @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "StreamChatErrorEvent" @@ -52,6 +52,6 @@ class StreamChatDeltaReceivedEvent(BaseEvent): delta: str @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "StreamChatDeltaReceivedEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/embedding.py b/llama-index-core/llama_index/core/instrumentation/events/embedding.py index 84429a87765a3..84ea08921bb48 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/embedding.py +++ b/llama-index-core/llama_index/core/instrumentation/events/embedding.py @@ -15,7 +15,7 @@ class EmbeddingStartEvent(BaseEvent): model_dict: dict @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "EmbeddingStartEvent" @@ -33,6 +33,6 @@ class EmbeddingEndEvent(BaseEvent): embeddings: List[List[float]] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "EmbeddingEndEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/exception.py b/llama-index-core/llama_index/core/instrumentation/events/exception.py index 5101e18140597..a3f5dae288479 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/exception.py +++ b/llama-index-core/llama_index/core/instrumentation/events/exception.py @@ -11,6 +11,6 @@ class ExceptionEvent(BaseEvent): exception: BaseException @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "ExceptionEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/llm.py b/llama-index-core/llama_index/core/instrumentation/events/llm.py index 9c7749f79d805..3ed824f525fd4 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/llm.py +++ b/llama-index-core/llama_index/core/instrumentation/events/llm.py @@ -21,7 +21,7 @@ class LLMPredictStartEvent(BaseEvent): template_args: Optional[dict] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMPredictStartEvent" @@ -38,7 +38,7 @@ class LLMPredictEndEvent(BaseEvent): output: str @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMPredictEndEvent" @@ -57,7 +57,7 @@ class LLMStructuredPredictStartEvent(BaseEvent): template_args: Optional[dict] 
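Note: the instrumentation hunks above pin down the handler-facing surface: `class_name` now returns `str` everywhere, `handle` takes `**kwargs: Any`, and `BaseEvent.id_` now defaults to `str(uuid4())` instead of a bare `UUID`. A minimal sketch of a custom handler against that surface, assuming the usual `get_dispatcher()` registration path (the handler name and print logic are illustrative only):

```python
from typing import Any

from llama_index.core.instrumentation import get_dispatcher
from llama_index.core.instrumentation.event_handlers.base import BaseEventHandler
from llama_index.core.instrumentation.events.base import BaseEvent


class PrintEventHandler(BaseEventHandler):
    """Toy handler: print each event's class name and string id."""

    @classmethod
    def class_name(cls) -> str:
        return "PrintEventHandler"

    def handle(self, event: BaseEvent, **kwargs: Any) -> Any:
        print(f"{event.class_name()}: {event.id_}")


# Register on the root dispatcher so every dispatched event reaches it.
get_dispatcher().add_event_handler(PrintEventHandler())
```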
@classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMStructuredPredictStartEvent" @@ -72,7 +72,7 @@ class LLMStructuredPredictEndEvent(BaseEvent): output: SerializeAsAny[BaseModel] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMStructuredPredictEndEvent" @@ -87,7 +87,7 @@ class LLMStructuredPredictInProgressEvent(BaseEvent): output: SerializeAsAny[BaseModel] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMStructuredPredictInProgressEvent" @@ -107,7 +107,7 @@ class LLMCompletionStartEvent(BaseEvent): model_dict: dict @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMCompletionStartEvent" @@ -124,7 +124,7 @@ class LLMCompletionInProgressEvent(BaseEvent): response: CompletionResponse @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMCompletionInProgressEvent" @@ -141,7 +141,7 @@ class LLMCompletionEndEvent(BaseEvent): response: CompletionResponse @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMCompletionEndEvent" @@ -161,7 +161,7 @@ class LLMChatStartEvent(BaseEvent): model_dict: dict @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMChatStartEvent" @@ -178,7 +178,7 @@ class LLMChatInProgressEvent(BaseEvent): response: ChatResponse @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMChatInProgressEvent" @@ -195,6 +195,6 @@ class LLMChatEndEvent(BaseEvent): response: Optional[ChatResponse] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "LLMChatEndEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/query.py b/llama-index-core/llama_index/core/instrumentation/events/query.py index d7af9714deaa4..ea08dc155b52e 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/query.py +++ b/llama-index-core/llama_index/core/instrumentation/events/query.py @@ -13,7 +13,7 @@ class QueryStartEvent(BaseEvent): query: QueryType @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "QueryStartEvent" @@ -30,6 +30,6 @@ class QueryEndEvent(BaseEvent): response: RESPONSE_TYPE @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "QueryEndEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/rerank.py b/llama-index-core/llama_index/core/instrumentation/events/rerank.py index 91cf491b6baa1..ea6003a3cb756 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/rerank.py +++ b/llama-index-core/llama_index/core/instrumentation/events/rerank.py @@ -22,7 +22,7 @@ class ReRankStartEvent(BaseEvent): model_name: str @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "ReRankStartEvent" @@ -37,6 +37,6 @@ class ReRankEndEvent(BaseEvent): nodes: List[NodeWithScore] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "ReRankEndEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/retrieval.py b/llama-index-core/llama_index/core/instrumentation/events/retrieval.py index 54b46b2591b78..3a051a1f61e1d 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/retrieval.py +++ b/llama-index-core/llama_index/core/instrumentation/events/retrieval.py @@ -13,7 +13,7 @@ class 
RetrievalStartEvent(BaseEvent): str_or_query_bundle: QueryType @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "RetrievalStartEvent" @@ -30,6 +30,6 @@ class RetrievalEndEvent(BaseEvent): nodes: List[NodeWithScore] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "RetrievalEndEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/span.py b/llama-index-core/llama_index/core/instrumentation/events/span.py index 01d74e05bf127..0efa524303e7f 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/span.py +++ b/llama-index-core/llama_index/core/instrumentation/events/span.py @@ -11,6 +11,6 @@ class SpanDropEvent(BaseEvent): err_str: str @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "SpanDropEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/events/synthesis.py b/llama-index-core/llama_index/core/instrumentation/events/synthesis.py index c117d1cfc57c6..1ffde05fb0bca 100644 --- a/llama-index-core/llama_index/core/instrumentation/events/synthesis.py +++ b/llama-index-core/llama_index/core/instrumentation/events/synthesis.py @@ -15,7 +15,7 @@ class SynthesizeStartEvent(BaseEvent): query: QueryType @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "SynthesizeStartEvent" @@ -32,7 +32,7 @@ class SynthesizeEndEvent(BaseEvent): response: RESPONSE_TYPE @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "SynthesizeEndEvent" @@ -49,7 +49,7 @@ class GetResponseStartEvent(BaseEvent): text_chunks: List[str] @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "GetResponseStartEvent" @@ -61,6 +61,6 @@ class GetResponseEndEvent(BaseEvent): # response: RESPONSE_TEXT_TYPE @classmethod - def class_name(cls): + def class_name(cls) -> str: """Class name.""" return "GetResponseEndEvent" diff --git a/llama-index-core/llama_index/core/instrumentation/span/base.py b/llama-index-core/llama_index/core/instrumentation/span/base.py index 27d2ecf5e118c..4286988fe1a88 100644 --- a/llama-index-core/llama_index/core/instrumentation/span/base.py +++ b/llama-index-core/llama_index/core/instrumentation/span/base.py @@ -1,4 +1,5 @@ from typing import Any, Dict, Optional +from uuid import uuid4 from llama_index.core.bridge.pydantic import BaseModel, Field, ConfigDict @@ -6,6 +7,6 @@ class BaseSpan(BaseModel): """Base data class representing a span.""" model_config = ConfigDict(arbitrary_types_allowed=True) - id_: str = Field(default_factory=str, description="Id of span.") + id_: str = Field(default_factory=lambda: str(uuid4()), description="Id of span.") parent_id: Optional[str] = Field(default=None, description="Id of parent span.") tags: Dict[str, Any] = Field(default={}) diff --git a/llama-index-core/llama_index/core/instrumentation/span/simple.py b/llama-index-core/llama_index/core/instrumentation/span/simple.py index 7905b743f2759..de193fe8de3ce 100644 --- a/llama-index-core/llama_index/core/instrumentation/span/simple.py +++ b/llama-index-core/llama_index/core/instrumentation/span/simple.py @@ -9,5 +9,5 @@ class SimpleSpan(BaseSpan): start_time: datetime = Field(default_factory=lambda: datetime.now()) end_time: Optional[datetime] = Field(default=None) - duration: float = Field(default=float, description="Duration of span in seconds.") + duration: float = Field(default=0.0, description="Duration of span in seconds.") metadata: 
Optional[Dict] = Field(default=None) diff --git a/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py b/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py index c72c36b7c9df8..25de18052b45f 100644 --- a/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py +++ b/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py @@ -53,7 +53,7 @@ def prepare_to_drop_span( instance: Optional[Any] = None, err: Optional[BaseException] = None, **kwargs: Any, - ) -> SimpleSpan: + ) -> Optional[SimpleSpan]: """Logic for droppping a span.""" if id_ in self.open_spans: with self.lock: diff --git a/llama-index-core/llama_index/core/langchain_helpers/agents/toolkits.py b/llama-index-core/llama_index/core/langchain_helpers/agents/toolkits.py index 85317fcc78c93..64386ee000adc 100644 --- a/llama-index-core/llama_index/core/langchain_helpers/agents/toolkits.py +++ b/llama-index-core/llama_index/core/langchain_helpers/agents/toolkits.py @@ -3,7 +3,7 @@ from typing import List from llama_index.core.bridge.langchain import BaseTool, BaseToolkit -from llama_index.core.bridge.pydantic import Field +from llama_index.core.bridge.pydantic import ConfigDict, Field from llama_index.core.langchain_helpers.agents.tools import ( IndexToolConfig, LlamaIndexTool, diff --git a/llama-index-core/llama_index/core/langchain_helpers/agents/tools.py b/llama-index-core/llama_index/core/langchain_helpers/agents/tools.py index c0a4fa60be38c..ee0d823ab6182 100644 --- a/llama-index-core/llama_index/core/langchain_helpers/agents/tools.py +++ b/llama-index-core/llama_index/core/langchain_helpers/agents/tools.py @@ -5,7 +5,7 @@ from llama_index.core.base.base_query_engine import BaseQueryEngine from llama_index.core.base.response.schema import RESPONSE_TYPE from llama_index.core.bridge.langchain import BaseTool -from llama_index.core.bridge.pydantic import BaseModel, Field +from llama_index.core.bridge.pydantic import BaseModel, ConfigDict, Field from llama_index.core.schema import TextNode diff --git a/llama-index-core/llama_index/core/llama_dataset/base.py b/llama-index-core/llama_index/core/llama_dataset/base.py index 434c681490eb8..e1d3669783b34 100644 --- a/llama-index-core/llama_index/core/llama_dataset/base.py +++ b/llama-index-core/llama_index/core/llama_dataset/base.py @@ -3,7 +3,18 @@ import json from abc import abstractmethod from enum import Enum -from typing import Any, Generator, Generic, List, Optional, Type, TypeVar, Union +from typing import ( + Any, + ClassVar, + Generator, + Generic, + List, + Sequence, + Optional, + Type, + TypeVar, + Union, +) import tqdm from llama_index.core.async_utils import asyncio_module @@ -62,12 +73,14 @@ def class_name(self) -> str: class BaseLlamaPredictionDataset(BaseModel): - _prediction_type: Type[BaseLlamaExamplePrediction] = BaseLlamaExamplePrediction # type: ignore[misc] + _prediction_type: ClassVar[Type[BaseLlamaExamplePrediction]] predictions: List[BaseLlamaExamplePrediction] = Field( - default=list, description="Predictions on train_examples." + default_factory=list, description="Predictions on train_examples." ) - def __getitem__(self, val: Union[slice, int]) -> List[BaseLlamaExamplePrediction]: + def __getitem__( + self, val: Union[slice, int] + ) -> Union[Sequence[BaseLlamaExamplePrediction], BaseLlamaExamplePrediction]: """Enable slicing and indexing. Returns the desired slice on `predictions`. 
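The `__getitem__` widening just above is the interesting mypy fix in this block: indexing with an `int` returns a single example while a slice returns many, so the old `List[...]` return annotation was simply wrong. A toy sketch of the corrected contract (stand-in types, not the real dataset classes):

```python
from typing import List, Sequence, Union


class TinyDataset:
    """Toy stand-in for the dataset indexing contract."""

    def __init__(self, examples: List[str]) -> None:
        self.examples = examples

    def __getitem__(self, val: Union[slice, int]) -> Union[Sequence[str], str]:
        # An int yields one example, a slice yields a sequence, hence the
        # Union return type the diff introduces.
        return self.examples[val]


ds = TinyDataset(["a", "b", "c"])
assert ds[0] == "a"            # int -> single example
assert ds[0:2] == ["a", "b"]   # slice -> sequence of examples
```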
@@ -114,7 +127,7 @@ def class_name(self) -> str: class BaseLlamaDataset(BaseModel, Generic[P]): - _example_type: Type[BaseLlamaDataExample] = BaseLlamaDataExample # type: ignore[misc] + _example_type: ClassVar[Type[BaseLlamaDataExample]] examples: List[BaseLlamaDataExample] = Field( default=[], description="Data examples of this dataset." ) @@ -122,7 +135,9 @@ class BaseLlamaDataset(BaseModel, Generic[P]): default_factory=list ) - def __getitem__(self, val: Union[slice, int]) -> List[BaseLlamaDataExample]: + def __getitem__( + self, val: Union[slice, int] + ) -> Union[Sequence[BaseLlamaDataExample], BaseLlamaDataExample]: """Enable slicing and indexing. Returns the desired slice on `examples`. @@ -157,7 +172,7 @@ def from_json(cls, path: str) -> "BaseLlamaDataset": @abstractmethod def _construct_prediction_dataset( - self, predictions: List[BaseLlamaExamplePrediction] + self, predictions: Sequence[BaseLlamaExamplePrediction] ) -> BaseLlamaPredictionDataset: """Construct the specific prediction dataset. @@ -250,7 +265,7 @@ def _batch_examples( self, batch_size: int = 20, start_position: int = 0, - ) -> Generator[List[BaseLlamaDataExample], None, None]: + ) -> Generator[Sequence[BaseLlamaDataExample], None, None]: """Batches examples and predictions with a given batch_size.""" num_examples = len(self.examples) for ndx in range(start_position, num_examples, batch_size): diff --git a/llama-index-core/llama_index/core/llama_dataset/download.py b/llama-index-core/llama_index/core/llama_dataset/download.py index 0b8e518d679ae..c1cde0f03fe14 100644 --- a/llama-index-core/llama_index/core/llama_dataset/download.py +++ b/llama-index-core/llama_index/core/llama_dataset/download.py @@ -40,7 +40,7 @@ def download_llama_dataset( llama_datasets_source_files_tree_url: str = LLAMA_DATASETS_SOURCE_FILES_GITHUB_TREE_URL, show_progress: bool = False, load_documents: bool = True, -) -> Tuple[Type[BaseLlamaDataset], List[Document]]: +) -> Tuple[BaseLlamaDataset, List[Document]]: """Download dataset from datasets-LFS and llamahub. Args: diff --git a/llama-index-core/llama_index/core/llama_dataset/evaluator_evaluation.py b/llama-index-core/llama_index/core/llama_dataset/evaluator_evaluation.py index 6ef540efeed9f..11865bb87ce40 100644 --- a/llama-index-core/llama_index/core/llama_dataset/evaluator_evaluation.py +++ b/llama-index-core/llama_index/core/llama_dataset/evaluator_evaluation.py @@ -2,7 +2,7 @@ import asyncio import time -from typing import Any, List, Optional +from typing import Any, Dict, List, Sequence, Optional from llama_index.core.bridge.pydantic import Field from llama_index.core.evaluation import ( @@ -123,12 +123,17 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." ) - data = {} - if self.predictions: - data = { - "feedback": [t.feedback for t in self.predictions], - "score": [t.score for t in self.predictions], - } + data: Dict[str, List] = { + "feedback": [], + "score": [], + } + for pred in self.predictions: + if not isinstance(pred, EvaluatorExamplePrediction): + raise ValueError( + "EvaluatorPredictionDataset can only contain EvaluatorExamplePrediction instances." + ) + data["feedback"].append(pred.feedback) + data["score"].append(pred.score) return pd.DataFrame(data) @@ -152,26 +157,38 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." 
) - data = { - "query": [t.query for t in self.examples], - "answer": [t.answer for t in self.examples], - "contexts": [t.contexts for t in self.examples], - "ground_truth_answer": [t.ground_truth_answer for t in self.examples], - "query_by": [str(t.query_by) for t in self.examples], - "answer_by": [str(t.answer_by) for t in self.examples], - "ground_truth_answer_by": [ - str(t.ground_truth_answer_by) for t in self.examples - ], - "reference_feedback": [t.reference_feedback for t in self.examples], - "reference_score": [t.reference_score for t in self.examples], - "reference_evaluation_by": [ - t.reference_evaluation_by for t in self.examples - ], + data: Dict[str, List] = { + "query": [], + "answer": [], + "contexts": [], + "ground_truth_answer": [], + "query_by": [], + "answer_by": [], + "ground_truth_answer_by": [], + "reference_feedback": [], + "reference_score": [], + "reference_evaluation_by": [], } + for example in self.examples: + if not isinstance(example, LabelledEvaluatorDataExample): + raise ValueError( + "LabelledEvaluatorDataset can only contain LabelledEvaluatorDataExample instances." + ) + data["query"].append(example.query) + data["answer"].append(example.answer) + data["contexts"].append(example.contexts) + data["ground_truth_answer"].append(example.ground_truth_answer) + data["query_by"].append(str(example.query_by)) + data["answer_by"].append(str(example.answer_by)) + data["ground_truth_answer_by"].append(str(example.ground_truth_answer_by)) + data["reference_feedback"].append(example.reference_feedback) + data["reference_score"].append(example.reference_score) + data["reference_evaluation_by"].append(str(example.reference_evaluation_by)) + return pd.DataFrame(data) - async def _apredict_example( + async def _apredict_example( # type: ignore self, predictor: BaseEvaluator, example: LabelledEvaluatorDataExample, @@ -195,14 +212,14 @@ async def _apredict_example( if not eval_result.invalid_result: return EvaluatorExamplePrediction( - feedback=eval_result.feedback, score=eval_result.score + feedback=eval_result.feedback or "", score=eval_result.score ) else: return EvaluatorExamplePrediction( invalid_prediction=True, invalid_reason=eval_result.invalid_reason ) - def _predict_example( + def _predict_example( # type: ignore self, predictor: BaseEvaluator, example: LabelledEvaluatorDataExample, @@ -226,15 +243,15 @@ def _predict_example( if not eval_result.invalid_result: return EvaluatorExamplePrediction( - feedback=eval_result.feedback, score=eval_result.score + feedback=eval_result.feedback or "", score=eval_result.score ) else: return EvaluatorExamplePrediction( invalid_prediction=True, invalid_reason=eval_result.invalid_reason ) - def _construct_prediction_dataset( - self, predictions: List[EvaluatorExamplePrediction] + def _construct_prediction_dataset( # type: ignore + self, predictions: Sequence[EvaluatorExamplePrediction] ) -> EvaluatorPredictionDataset: """Construct prediction dataset.""" return EvaluatorPredictionDataset(predictions=predictions) @@ -295,13 +312,19 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." 
) - data = {} - if self.predictions: - data = { - "feedback": [t.feedback for t in self.predictions], - "score": [t.score for t in self.predictions], - "ordering": [t.evaluation_source.value for t in self.predictions], - } + data: Dict[str, List] = { + "feedback": [], + "score": [], + "ordering": [], + } + for prediction in self.predictions: + if not isinstance(prediction, PairwiseEvaluatorExamplePrediction): + raise ValueError( + "PairwiseEvaluatorPredictionDataset can only contain PairwiseEvaluatorExamplePrediction instances." + ) + data["feedback"].append(prediction.feedback) + data["score"].append(prediction.score) + data["ordering"].append(str(prediction.evaluation_source)) return pd.DataFrame(data) @@ -347,28 +370,41 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." ) - data = { - "query": [t.query for t in self.examples], - "answer": [t.answer for t in self.examples], - "second_answer": [t.second_answer for t in self.examples], - "contexts": [t.contexts for t in self.examples], - "ground_truth_answer": [t.ground_truth_answer for t in self.examples], - "query_by": [str(t.query_by) for t in self.examples], - "answer_by": [str(t.answer_by) for t in self.examples], - "second_answer_by": [str(t.second_answer_by) for t in self.examples], - "ground_truth_answer_by": [ - str(t.ground_truth_answer_by) for t in self.examples - ], - "reference_feedback": [t.reference_feedback for t in self.examples], - "reference_score": [t.reference_score for t in self.examples], - "reference_evaluation_by": [ - t.reference_evaluation_by for t in self.examples - ], + data: Dict[str, List] = { + "query": [], + "answer": [], + "second_answer": [], + "contexts": [], + "ground_truth_answer": [], + "query_by": [], + "answer_by": [], + "second_answer_by": [], + "ground_truth_answer_by": [], + "reference_feedback": [], + "reference_score": [], + "reference_evaluation_by": [], } + for example in self.examples: + if not isinstance(example, LabelledPairwiseEvaluatorDataExample): + raise ValueError( + "LabelledPairwiseEvaluatorDataset can only contain LabelledPairwiseEvaluatorDataExample instances." 
+ ) + data["query"].append(example.query) + data["answer"].append(example.answer) + data["second_answer"].append(example.second_answer) + data["contexts"].append(example.contexts) + data["ground_truth_answer"].append(example.ground_truth_answer) + data["query_by"].append(str(example.query_by)) + data["answer_by"].append(str(example.answer_by)) + data["second_answer_by"].append(str(example.second_answer_by)) + data["ground_truth_answer_by"].append(str(example.ground_truth_answer_by)) + data["reference_feedback"].append(example.reference_feedback) + data["reference_score"].append(example.reference_score) + data["reference_evaluation_by"].append(str(example.reference_evaluation_by)) return pd.DataFrame(data) - async def _apredict_example( + async def _apredict_example( # type: ignore self, predictor: BaseEvaluator, example: LabelledPairwiseEvaluatorDataExample, @@ -393,16 +429,16 @@ async def _apredict_example( if not eval_result.invalid_result: return PairwiseEvaluatorExamplePrediction( - feedback=eval_result.feedback, + feedback=eval_result.feedback or "", score=eval_result.score, - evaluation_source=eval_result.pairwise_source, + evaluation_source=EvaluationSource(eval_result.pairwise_source), ) else: return PairwiseEvaluatorExamplePrediction( invalid_prediction=True, invalid_reason=eval_result.invalid_reason ) - def _predict_example( + def _predict_example( # type: ignore self, predictor: BaseEvaluator, example: LabelledPairwiseEvaluatorDataExample, @@ -427,17 +463,17 @@ def _predict_example( if not eval_result.invalid_result: return PairwiseEvaluatorExamplePrediction( - feedback=eval_result.feedback, + feedback=eval_result.feedback or "", score=eval_result.score, - evaluation_source=eval_result.pairwise_source, + evaluation_source=EvaluationSource(eval_result.pairwise_source), ) else: return PairwiseEvaluatorExamplePrediction( invalid_prediction=True, invalid_reason=eval_result.invalid_reason ) - def _construct_prediction_dataset( - self, predictions: List[PairwiseEvaluatorExamplePrediction] + def _construct_prediction_dataset( # type: ignore + self, predictions: Sequence[PairwiseEvaluatorExamplePrediction] ) -> PairwiseEvaluatorPredictionDataset: """Construct prediction dataset.""" return PairwiseEvaluatorPredictionDataset(predictions=predictions) diff --git a/llama-index-core/llama_index/core/llama_dataset/generator.py b/llama-index-core/llama_index/core/llama_dataset/generator.py index 3b81373848ce8..70560757c771a 100644 --- a/llama-index-core/llama_index/core/llama_dataset/generator.py +++ b/llama-index-core/llama_index/core/llama_dataset/generator.py @@ -4,7 +4,7 @@ import re import warnings -from typing import List, Optional +from typing import List, Sequence, Optional from llama_index.core import Document, SummaryIndex from llama_index.core.async_utils import DEFAULT_NUM_WORKERS, run_jobs, asyncio_run @@ -91,7 +91,7 @@ def __init__( @classmethod def from_documents( cls, - documents: List[Document], + documents: Sequence[Document], llm: Optional[LLM] = None, transformations: Optional[List[TransformComponent]] = None, num_questions_per_chunk: int = 3, @@ -115,7 +115,6 @@ def from_documents( required_keywords = required_keywords or [] exclude_keywords = exclude_keywords or [] node_postprocessor = KeywordNodePostprocessor( - llm=llm, required_keywords=required_keywords, exclude_keywords=exclude_keywords, ) @@ -148,7 +147,7 @@ async def _agenerate_dataset( [ Document( text=node.get_content(metadata_mode=self._metadata_mode), - metadata=node.metadata, + metadata=node.metadata, # type: 
ignore excluded_llm_metadata_keys=node.excluded_llm_metadata_keys, excluded_embed_metadata_keys=node.excluded_embed_metadata_keys, relationships=node.relationships, @@ -185,7 +184,7 @@ async def _agenerate_dataset( ) index = summary_indices[idx] - reference_context = nodes[idx].text + reference_context = nodes[idx].get_content(metadata_mode=MetadataMode.NONE) model_name = self._llm.metadata.model_name created_by = CreatedBy(type=CreatedByType.AI, model_name=model_name) if labelled: diff --git a/llama-index-core/llama_index/core/llama_dataset/legacy/embedding.py b/llama-index-core/llama_index/core/llama_dataset/legacy/embedding.py index e202f6cec15f5..6f6c09c4dfdf4 100644 --- a/llama-index-core/llama_index/core/llama_dataset/legacy/embedding.py +++ b/llama-index-core/llama_index/core/llama_dataset/legacy/embedding.py @@ -4,11 +4,12 @@ import re import uuid import warnings -from typing import Dict, List, Tuple +from typing import Dict, List, Optional, Tuple from llama_index.core.bridge.pydantic import BaseModel from llama_index.core.llms.utils import LLM from llama_index.core.schema import MetadataMode, TextNode +from llama_index.core.settings import Settings from tqdm import tqdm @@ -69,11 +70,12 @@ def from_json(cls, path: str) -> "EmbeddingQAFinetuneDataset": # generate queries as a convenience function def generate_qa_embedding_pairs( nodes: List[TextNode], - llm: LLM, + llm: Optional[LLM] = None, qa_generate_prompt_tmpl: str = DEFAULT_QA_GENERATE_PROMPT_TMPL, num_questions_per_chunk: int = 2, ) -> EmbeddingQAFinetuneDataset: """Generate examples given a set of nodes.""" + llm = llm or Settings.llm node_dict = { node.node_id: node.get_content(metadata_mode=MetadataMode.NONE) for node in nodes diff --git a/llama-index-core/llama_index/core/llama_dataset/rag.py b/llama-index-core/llama_index/core/llama_dataset/rag.py index 6d1c2dc915526..5ee6286dd0b9b 100644 --- a/llama-index-core/llama_index/core/llama_dataset/rag.py +++ b/llama-index-core/llama_index/core/llama_dataset/rag.py @@ -2,7 +2,7 @@ import asyncio import time -from typing import Any, List, Optional +from typing import Any, Dict, List, Optional, Sequence from llama_index.core.base.base_query_engine import BaseQueryEngine from llama_index.core.bridge.pydantic import Field @@ -91,12 +91,17 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." ) - data = {} - if self.predictions: - data = { - "response": [t.response for t in self.predictions], - "contexts": [t.contexts for t in self.predictions], - } + data: Dict[str, List] = { + "response": [], + "contexts": [], + } + for pred in self.predictions: + if not isinstance(pred, RagExamplePrediction): + raise ValueError( + "All predictions in the dataset must be of type RagExamplePrediction." + ) + data["response"].append(pred.response) + data["contexts"].append(pred.contexts) return pd.DataFrame(data) @@ -120,17 +125,27 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." 
) - data = { - "query": [t.query for t in self.examples], - "reference_contexts": [t.reference_contexts for t in self.examples], - "reference_answer": [t.reference_answer for t in self.examples], - "reference_answer_by": [str(t.reference_answer_by) for t in self.examples], - "query_by": [str(t.query_by) for t in self.examples], + data: Dict[str, List] = { + "query": [], + "reference_contexts": [], + "reference_answer": [], + "reference_answer_by": [], + "query_by": [], } + for example in self.examples: + if not isinstance(example, LabelledRagDataExample): + raise ValueError( + "All examples in the dataset must be of type LabelledRagDataExample." + ) + data["query"].append(example.query) + data["reference_contexts"].append(example.reference_contexts) + data["reference_answer"].append(example.reference_answer) + data["reference_answer_by"].append(str(example.reference_answer_by)) + data["query_by"].append(str(example.query_by)) return pd.DataFrame(data) - async def _apredict_example( + async def _apredict_example( # type: ignore self, predictor: BaseQueryEngine, example: LabelledRagDataExample, @@ -143,7 +158,7 @@ async def _apredict_example( response=str(response), contexts=[s.text for s in response.source_nodes] ) - def _predict_example( + def _predict_example( # type: ignore self, predictor: BaseQueryEngine, example: LabelledRagDataExample, @@ -156,8 +171,8 @@ def _predict_example( response=str(response), contexts=[s.text for s in response.source_nodes] ) - def _construct_prediction_dataset( - self, predictions: List[RagExamplePrediction] + def _construct_prediction_dataset( # type: ignore + self, predictions: Sequence[RagExamplePrediction] ) -> RagPredictionDataset: """Construct prediction dataset.""" return RagPredictionDataset(predictions=predictions) diff --git a/llama-index-core/llama_index/core/llama_dataset/simple.py b/llama-index-core/llama_index/core/llama_dataset/simple.py index 4393ab8c64a5f..98ef229771714 100644 --- a/llama-index-core/llama_index/core/llama_dataset/simple.py +++ b/llama-index-core/llama_index/core/llama_dataset/simple.py @@ -1,4 +1,4 @@ -from typing import Any, Optional, List +from typing import Any, Dict, List, Sequence, Optional from llama_index.core.llama_dataset.base import ( BaseLlamaDataExample, BaseLlamaDataset, @@ -44,11 +44,12 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." ) - data = {} + data: Dict[str, List[str]] = {"label": []} if self.predictions: - data = { - "label": [t.label for t in self.predictions], - } + for t in self.predictions: + assert isinstance(t, self._prediction_type) + + data["label"].append(t.label) return pd.DataFrame(data) @@ -74,8 +75,8 @@ def class_name(self) -> str: class LabelledSimpleDataset(BaseLlamaDataset[LLM]): _example_type = LabelledSimpleDataExample - def _construct_prediction_dataset( - self, predictions: List[SimpleExamplePrediction] + def _construct_prediction_dataset( # type: ignore + self, predictions: Sequence[SimpleExamplePrediction] ) -> SimplePredictionDataset: """Construct the specific prediction dataset. @@ -96,20 +97,29 @@ def to_pandas(self) -> Any: "pandas is required for this function. Please install it with `pip install pandas`." 
) - data = { - "reference_label": [t.reference_label for t in self.examples], - "text": [t.text for t in self.examples], - "text_by": [str(t.text_by) for t in self.examples], + data: Dict[str, List[str]] = { + "reference_label": [], + "text": [], + "text_by": [], } + for example in self.examples: + if not isinstance(example, self._example_type): + raise ValueError( + f"Expected example of type {LabelledSimpleDataExample}, got {type(example)}" + ) + + data["reference_label"].append(example.reference_label) + data["text"].append(example.text) + data["text_by"].append(str(example.text_by)) return pd.DataFrame(data) async def _apredict_example( self, predictor: LLM, - example: LabelledSimpleDataExample, + example: BaseLlamaDataExample, sleep_time_in_seconds: int, - ) -> SimpleExamplePrediction: + ) -> BaseLlamaExamplePrediction: """Async predict RAG example with a query engine.""" raise NotImplementedError("This method has not yet been implemented.") diff --git a/llama-index-core/llama_index/core/llms/callbacks.py b/llama-index-core/llama_index/core/llms/callbacks.py index 4a5cc8febfcac..32390f8a52f70 100644 --- a/llama-index-core/llama_index/core/llms/callbacks.py +++ b/llama-index-core/llama_index/core/llms/callbacks.py @@ -42,12 +42,9 @@ def wrap(f: Callable) -> Callable: def wrapper_logic(_self: Any) -> Generator[CallbackManager, None, None]: callback_manager = getattr(_self, "callback_manager", None) if not isinstance(callback_manager, CallbackManager): - raise ValueError( - "Cannot use llm_chat_callback on an instance " - "without a callback_manager attribute." - ) + _self.callback_manager = CallbackManager() - yield callback_manager + yield _self.callback_manager # type: ignore async def wrapped_async_llm_chat( _self: Any, messages: Sequence[ChatMessage], **kwargs: Any diff --git a/llama-index-core/llama_index/core/llms/custom.py b/llama-index-core/llama_index/core/llms/custom.py index 3cadc5d0ce23e..ea7fbbd4348dd 100644 --- a/llama-index-core/llama_index/core/llms/custom.py +++ b/llama-index-core/llama_index/core/llms/custom.py @@ -28,6 +28,8 @@ class CustomLLM(LLM): @llm_chat_callback() def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: + assert self.messages_to_prompt is not None + prompt = self.messages_to_prompt(messages) completion_response = self.complete(prompt, formatted=True, **kwargs) return completion_response_to_chat_response(completion_response) @@ -36,6 +38,8 @@ def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: def stream_chat( self, messages: Sequence[ChatMessage], **kwargs: Any ) -> ChatResponseGen: + assert self.messages_to_prompt is not None + prompt = self.messages_to_prompt(messages) completion_response_gen = self.stream_complete(prompt, formatted=True, **kwargs) return stream_completion_response_to_chat_response(completion_response_gen) diff --git a/llama-index-core/llama_index/core/llms/function_calling.py b/llama-index-core/llama_index/core/llms/function_calling.py index c6b82f8b5cbcd..c4809ef554877 100644 --- a/llama-index-core/llama_index/core/llms/function_calling.py +++ b/llama-index-core/llama_index/core/llms/function_calling.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional, Union, TYPE_CHECKING, Dict +from typing import Any, Dict, List, Optional, Sequence, Union, TYPE_CHECKING from abc import abstractmethod import asyncio @@ -29,7 +29,7 @@ class FunctionCallingLLM(LLM): def chat_with_tools( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, 
ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -55,7 +55,7 @@ def chat_with_tools( async def achat_with_tools( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -81,7 +81,7 @@ async def achat_with_tools( def stream_chat_with_tools( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -102,7 +102,7 @@ def stream_chat_with_tools( async def astream_chat_with_tools( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -124,7 +124,7 @@ async def astream_chat_with_tools( @abstractmethod def _prepare_chat_with_tools( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -136,7 +136,7 @@ def _prepare_chat_with_tools( def _validate_chat_with_tools_response( self, response: ChatResponse, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], allow_parallel_tool_calls: bool = False, **kwargs: Any, ) -> ChatResponse: @@ -156,7 +156,7 @@ def get_tool_calls_from_response( def predict_and_call( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -204,7 +204,7 @@ def predict_and_call( raise ValueError("Invalid") elif len(tool_outputs) == 0: return AgentChatResponse( - response=response.message.content, sources=tool_outputs + response=response.message.content or "", sources=tool_outputs ) return AgentChatResponse( @@ -213,7 +213,7 @@ def predict_and_call( async def apredict_and_call( self, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], user_msg: Optional[Union[str, ChatMessage]] = None, chat_history: Optional[List[ChatMessage]] = None, verbose: bool = False, @@ -262,7 +262,7 @@ async def apredict_and_call( raise ValueError("Invalid") elif len(tool_outputs) == 0: return AgentChatResponse( - response=response.message.content, sources=tool_outputs + response=response.message.content or "", sources=tool_outputs ) return AgentChatResponse( diff --git a/llama-index-core/llama_index/core/llms/llm.py b/llama-index-core/llama_index/core/llms/llm.py index 501750e0fd5c5..813c726e5a0fe 100644 --- a/llama-index-core/llama_index/core/llms/llm.py +++ b/llama-index-core/llama_index/core/llms/llm.py @@ -315,6 +315,7 @@ def structured_predict( self, output_cls: BaseModel, prompt: PromptTemplate, + llm_kwargs: Optional[Dict[str, Any]] = None, **prompt_args: Any, ) -> BaseModel: r"""Structured predict. 
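The `List["BaseTool"]` to `Sequence["BaseTool"]` changes running through `FunctionCallingLLM` are about variance: `List` is invariant under mypy, so a `List[FunctionTool]` was rejected where `List[BaseTool]` was expected, while the read-only `Sequence` is covariant and accepts it. A self-contained illustration with stand-in classes (not the real tool types):

```python
from typing import List, Sequence


class BaseTool: ...            # stand-in for the real BaseTool
class FunctionTool(BaseTool): ...


def takes_list(tools: List[BaseTool]) -> None: ...
def takes_seq(tools: Sequence[BaseTool]) -> None: ...


my_tools: List[FunctionTool] = [FunctionTool()]
takes_seq(my_tools)   # OK: Sequence is covariant in its element type
takes_list(my_tools)  # runs fine, but mypy rejects it: List is invariant
```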
@@ -332,7 +333,7 @@ def structured_predict( Examples: ```python - from pydantic.v1 import BaseModel + from pydantic import BaseModel class Test(BaseModel): \"\"\"My test class.\"\"\" @@ -359,7 +360,7 @@ class Test(BaseModel): pydantic_program_mode=self.pydantic_program_mode, ) - result = program(**prompt_args) + result = program(llm_kwargs=llm_kwargs, **prompt_args) dispatcher.event(LLMStructuredPredictEndEvent(output=result)) return result @@ -385,7 +386,7 @@ async def astructured_predict( Examples: ```python - from pydantic.v1 import BaseModel + from pydantic import BaseModel class Test(BaseModel): \"\"\"My test class.\"\"\" @@ -422,6 +423,7 @@ def stream_structured_predict( self, output_cls: BaseModel, prompt: PromptTemplate, + llm_kwargs: Optional[Dict[str, Any]] = None, **prompt_args: Any, ) -> Generator[Union[Model, List[Model]], None, None]: r"""Stream Structured predict. @@ -439,7 +441,7 @@ def stream_structured_predict( Examples: ```python - from pydantic.v1 import BaseModel + from pydantic import BaseModel class Test(BaseModel): \"\"\"My test class.\"\"\" @@ -468,7 +470,7 @@ class Test(BaseModel): pydantic_program_mode=self.pydantic_program_mode, ) - result = program.stream_call(**prompt_args) + result = program.stream_call(llm_kwargs=llm_kwargs, **prompt_args) for r in result: dispatcher.event(LLMStructuredPredictInProgressEvent(output=r)) yield r @@ -480,6 +482,7 @@ async def astream_structured_predict( self, output_cls: BaseModel, prompt: PromptTemplate, + llm_kwargs: Optional[Dict[str, Any]] = None, **prompt_args: Any, ) -> AsyncGenerator[Union[Model, List[Model]], None]: r"""Async Stream Structured predict. @@ -497,7 +500,7 @@ async def astream_structured_predict( Examples: ```python - from pydantic.v1 import BaseModel + from pydantic import BaseModel class Test(BaseModel): \"\"\"My test class.\"\"\" @@ -528,7 +531,7 @@ async def gen() -> AsyncGenerator[Union[Model, List[Model]], None]: pydantic_program_mode=self.pydantic_program_mode, ) - result = await program.astream_call(**prompt_args) + result = await program.astream_call(llm_kwargs=llm_kwargs, **prompt_args) async for r in result: dispatcher.event(LLMStructuredPredictInProgressEvent(output=r)) yield r @@ -874,11 +877,13 @@ def _validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]: # do special check to see if prompt is a list of chat messages if isinstance(input["prompt"], get_args(List[ChatMessage])): - input["prompt"] = self.llm.messages_to_prompt(input["prompt"]) + if self.llm.messages_to_prompt: + input["prompt"] = self.llm.messages_to_prompt(input["prompt"]) input["prompt"] = validate_and_convert_stringable(input["prompt"]) else: input["prompt"] = validate_and_convert_stringable(input["prompt"]) - input["prompt"] = self.llm.completion_to_prompt(input["prompt"]) + if self.llm.completion_to_prompt: + input["prompt"] = self.llm.completion_to_prompt(input["prompt"]) return input @@ -888,6 +893,8 @@ def _run_component(self, **kwargs: Any) -> Any: # non-trivial to figure how to support chat/complete/etc. prompt = kwargs["prompt"] # ignore all other kwargs for now + + response: Any if self.streaming: response = self.llm.stream_complete(prompt, formatted=True) else: @@ -938,6 +945,8 @@ def _run_component(self, **kwargs: Any) -> Any: # TODO: support only complete for now # non-trivial to figure how to support chat/complete/etc. 
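The `llm_kwargs` parameter added to `structured_predict` and its streaming/async variants gives callers a slot for LLM options that are not prompt variables; per the hunks above it is forwarded as `program(llm_kwargs=...)`. A hedged usage sketch mirroring the updated docstring example, assuming `llama-index-llms-openai` is installed and an API key is configured:

```python
from pydantic import BaseModel

from llama_index.core.prompts import PromptTemplate
from llama_index.llms.openai import OpenAI


class Test(BaseModel):
    """My test class."""

    name: str


llm = OpenAI()
prompt = PromptTemplate("Please predict a Test with a random name related to {topic}.")

# llm_kwargs rides alongside **prompt_args: prompt_args fill the template,
# llm_kwargs goes to the underlying program/LLM call.
output = llm.structured_predict(
    Test, prompt, llm_kwargs={"temperature": 0.1}, topic="cats"
)
print(output.name)
```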
messages = kwargs["messages"] + + response: Any if self.streaming: response = self.llm.stream_chat(messages) else: @@ -949,6 +958,8 @@ async def _arun_component(self, **kwargs: Any) -> Any: # TODO: support only complete for now # non-trivial to figure how to support chat/complete/etc. messages = kwargs["messages"] + + response: Any if self.streaming: response = await self.llm.astream_chat(messages) else: diff --git a/llama-index-core/llama_index/core/llms/loading.py b/llama-index-core/llama_index/core/llms/loading.py index 20ce3f0dcfea3..0875857f6d8a0 100644 --- a/llama-index-core/llama_index/core/llms/loading.py +++ b/llama-index-core/llama_index/core/llms/loading.py @@ -1,12 +1,10 @@ from typing import Dict, Type -from llama_index.core.llms.custom import CustomLLM from llama_index.core.llms.llm import LLM from llama_index.core.llms.mock import MockLLM RECOGNIZED_LLMS: Dict[str, Type[LLM]] = { MockLLM.class_name(): MockLLM, - CustomLLM.class_name(): CustomLLM, } # Conditionals for llama-cloud support diff --git a/llama-index-core/llama_index/core/llms/structured_llm.py b/llama-index-core/llama_index/core/llms/structured_llm.py index 26c58ef076e71..11bb7c755f332 100644 --- a/llama-index-core/llama_index/core/llms/structured_llm.py +++ b/llama-index-core/llama_index/core/llms/structured_llm.py @@ -48,7 +48,7 @@ def _escape_braces(text: str) -> str: Only captures template variables, skips already escaped braces. """ - def replace(match): + def replace(match: re.Match[str]) -> str: if match.group(0).startswith("{{") and match.group(0).endswith("}}"): return match.group(0) # Already escaped, return as is return "{{" + match.group(1) + "}}" @@ -107,7 +107,7 @@ def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: chat_prompt = ChatPromptTemplate(message_templates=_escape_json(messages)) output = self.llm.structured_predict( - output_cls=self.output_cls, prompt=chat_prompt + output_cls=self.output_cls, prompt=chat_prompt, llm_kwargs=kwargs ) return ChatResponse( message=ChatMessage( @@ -123,7 +123,7 @@ def stream_chat( chat_prompt = ChatPromptTemplate(message_templates=_escape_json(messages)) stream_output = self.llm.stream_structured_predict( - output_cls=self.output_cls, prompt=chat_prompt, **kwargs + output_cls=self.output_cls, prompt=chat_prompt, llm_kwargs=kwargs ) for partial_output in stream_output: yield ChatResponse( @@ -161,7 +161,7 @@ async def achat( chat_prompt = ChatPromptTemplate(message_templates=_escape_json(messages)) output = await self.llm.astructured_predict( - output_cls=self.output_cls, prompt=chat_prompt + output_cls=self.output_cls, prompt=chat_prompt, llm_kwargs=kwargs ) return ChatResponse( message=ChatMessage( @@ -182,7 +182,7 @@ async def gen() -> ChatResponseAsyncGen: chat_prompt = ChatPromptTemplate(message_templates=_escape_json(messages)) stream_output = await self.llm.astream_structured_predict( - output_cls=self.output_cls, prompt=chat_prompt, **kwargs + output_cls=self.output_cls, prompt=chat_prompt, llm_kwargs=kwargs ) async for partial_output in stream_output: yield ChatResponse( @@ -210,11 +210,12 @@ async def astream_complete( def _as_query_component(self, **kwargs: Any) -> QueryComponent: """Return query component.""" + base_component: BaseLLMComponent if self.metadata.is_chat_model: base_component = LLMChatComponent(llm=self, **kwargs) else: base_component = LLMCompleteComponent(llm=self, **kwargs) - # llm_component = self.llm.as_query_component() + return StructuredLLMComponent(llm_component=base_component) @@ -250,7 +251,7 @@ 
def _run_component(self, **kwargs: Any) -> Any: async def _arun_component(self, **kwargs: Any) -> Any: """Run component.""" - output = await self.llm_component.arun_component(**kwargs)["output"] + output = (await self.llm_component.arun_component(**kwargs))["output"] # NOTE: can either be a CompletionResponse or ChatResponse # other types are not supported at the moment if isinstance(output, CompletionResponse): diff --git a/llama-index-core/llama_index/core/llms/utils.py b/llama-index-core/llama_index/core/llms/utils.py index 99471aa73293d..5db7e6f7b3ed6 100644 --- a/llama-index-core/llama_index/core/llms/utils.py +++ b/llama-index-core/llama_index/core/llms/utils.py @@ -39,7 +39,7 @@ def resolve_llm( ) # pants: no-infer-dep llm = OpenAI() - validate_openai_api_key(llm.api_key) + validate_openai_api_key(llm.api_key) # type: ignore except ImportError: raise ImportError( "`llama-index-llms-openai` package not found, " @@ -99,6 +99,8 @@ def resolve_llm( print("LLM is explicitly disabled. Using MockLLM.") llm = MockLLM() + assert isinstance(llm, LLM) + llm.callback_manager = callback_manager or Settings.callback_manager return llm diff --git a/llama-index-core/llama_index/core/memory/chat_memory_buffer.py b/llama-index-core/llama_index/core/memory/chat_memory_buffer.py index fe316c8f63ddb..57bcca4944701 100644 --- a/llama-index-core/llama_index/core/memory/chat_memory_buffer.py +++ b/llama-index-core/llama_index/core/memory/chat_memory_buffer.py @@ -20,7 +20,6 @@ class ChatMemoryBuffer(BaseChatStoreMemory): token_limit: int tokenizer_fn: Callable[[str], List] = Field( - # NOTE: mypy does not handle the typing here well, hence the cast default_factory=get_tokenizer, exclude=True, ) @@ -54,8 +53,12 @@ def from_defaults( chat_store_key: str = DEFAULT_CHAT_STORE_KEY, token_limit: Optional[int] = None, tokenizer_fn: Optional[Callable[[str], List]] = None, + **kwargs: Any, ) -> "ChatMemoryBuffer": """Create a chat memory buffer from an LLM.""" + if kwargs: + raise ValueError(f"Unexpected kwargs: {kwargs}") + if llm is not None: context_window = llm.metadata.context_window token_limit = token_limit or int(context_window * DEFAULT_TOKEN_LIMIT_RATIO) @@ -95,11 +98,11 @@ def from_dict(cls, data: Dict[str, Any], **kwargs: Any) -> "ChatMemoryBuffer": # NOTE: this handles backwards compatibility with the old chat history if "chat_history" in data: chat_history = data.pop("chat_history") - chat_store = SimpleChatStore(store={DEFAULT_CHAT_STORE_KEY: chat_history}) - data["chat_store"] = chat_store + simple_store = SimpleChatStore(store={DEFAULT_CHAT_STORE_KEY: chat_history}) + data["chat_store"] = simple_store elif "chat_store" in data: - chat_store = data.pop("chat_store") - chat_store = load_chat_store(chat_store) + chat_store_dict = data.pop("chat_store") + chat_store = load_chat_store(chat_store_dict) data["chat_store"] = chat_store return cls(**data) diff --git a/llama-index-core/llama_index/core/memory/chat_summary_memory_buffer.py b/llama-index-core/llama_index/core/memory/chat_summary_memory_buffer.py index 3f0bc4c21f9aa..cf368d9009b0d 100644 --- a/llama-index-core/llama_index/core/memory/chat_summary_memory_buffer.py +++ b/llama-index-core/llama_index/core/memory/chat_summary_memory_buffer.py @@ -44,7 +44,6 @@ class ChatSummaryMemoryBuffer(BaseMemory): llm: Optional[SerializeAsAny[LLM]] = None summarize_prompt: Optional[str] = None tokenizer_fn: Callable[[str], List] = Field( - # NOTE: mypy does not handle the typing here well, hence the cast default_factory=get_tokenizer, exclude=True, ) @@ -55,7 
+54,7 @@ class ChatSummaryMemoryBuffer(BaseMemory): _token_count: int = PrivateAttr(default=0) @field_serializer("chat_store") - def serialize_courses_in_order(chat_store: BaseChatStore): + def serialize_courses_in_order(self, chat_store: BaseChatStore) -> dict: res = chat_store.model_dump() res.update({"class_name": chat_store.class_name()}) return res @@ -89,10 +88,14 @@ def from_defaults( tokenizer_fn: Optional[Callable[[str], List]] = None, summarize_prompt: Optional[str] = None, count_initial_tokens: bool = False, + **kwargs: Any, ) -> "ChatSummaryMemoryBuffer": """Create a chat memory buffer from an LLM and an initial list of chat history messages. """ + if kwargs: + raise ValueError(f"Unexpected keyword arguments: {kwargs}") + if llm is not None: context_window = llm.metadata.context_window token_limit = token_limit or int(context_window * DEFAULT_TOKEN_LIMIT_RATIO) @@ -144,11 +147,11 @@ def from_dict( # NOTE: this handles backwards compatibility with the old chat history if "chat_history" in data: chat_history = data.pop("chat_history") - chat_store = SimpleChatStore(store={DEFAULT_CHAT_STORE_KEY: chat_history}) - data["chat_store"] = chat_store + simple_store = SimpleChatStore(store={DEFAULT_CHAT_STORE_KEY: chat_history}) + data["chat_store"] = simple_store elif "chat_store" in data: - chat_store = data.pop("chat_store") - chat_store = load_chat_store(chat_store) + chat_store_dict = data.pop("chat_store") + chat_store = load_chat_store(chat_store_dict) data["chat_store"] = chat_store # NOTE: The llm will have to be set manually in kwargs @@ -157,7 +160,9 @@ def from_dict( return cls(**data, **kwargs) - def get(self, initial_token_count: int = 0, **kwargs: Any) -> List[ChatMessage]: + def get( + self, input: Optional[str] = None, initial_token_count: int = 0, **kwargs: Any + ) -> List[ChatMessage]: """Get chat history.""" chat_history = self.get_all() if len(chat_history) == 0: @@ -224,7 +229,7 @@ def _split_messages_summary_or_full_text( """Determine which messages will be included as full text, and which will have to be summarized by the llm. """ - chat_history_full_text = [] + chat_history_full_text: List[ChatMessage] = [] message_count = len(chat_history) while ( message_count > 0 @@ -251,6 +256,8 @@ def _summarize_oldest_chat_history( """Use the llm to summarize the messages that do not fit into the buffer. """ + assert self.llm is not None + # Only summarize if there is new information to be summarized if ( len(chat_history_to_be_summarized) == 1 @@ -270,7 +277,7 @@ def _summarize_oldest_chat_history( def _get_prompt_to_summarize( self, chat_history_to_be_summarized: List[ChatMessage] - ): + ) -> str: """Ask the LLM to summarize the chat history so far.""" # TODO: This probably works better when question/answers are considered together. prompt = '"Transcript so far: ' @@ -289,14 +296,14 @@ def _get_prompt_to_summarize( + "\n\n" ) prompt += '"\n\n' - prompt += self.summarize_prompt + prompt += self.summarize_prompt or "" return prompt def _handle_assistant_and_tool_messages( self, chat_history_full_text: List[ChatMessage], chat_history_to_be_summarized: List[ChatMessage], - ) -> Tuple[List[ChatMessage], List[ChatMessage]]: + ) -> None: """To avoid breaking API's, we need to ensure the following. 
- the first message cannot be ASSISTANT diff --git a/llama-index-core/llama_index/core/memory/simple_composable_memory.py b/llama-index-core/llama_index/core/memory/simple_composable_memory.py index c11c86ad9670f..6a487468ded69 100644 --- a/llama-index-core/llama_index/core/memory/simple_composable_memory.py +++ b/llama-index-core/llama_index/core/memory/simple_composable_memory.py @@ -42,8 +42,12 @@ def from_defaults( cls, primary_memory: Optional[BaseMemory] = None, secondary_memory_sources: Optional[List[BaseMemory]] = None, + **kwargs: Any, ) -> "SimpleComposableMemory": """Create a simple composable memory from an LLM.""" + if kwargs: + raise ValueError(f"Unexpected kwargs: {kwargs}") + primary_memory = primary_memory or ChatMemoryBuffer.from_defaults() secondary_memory_sources = secondary_memory_sources or [] @@ -100,6 +104,7 @@ def _compose_message_histories( # add single_secondary_memory_str to chat_history if len(messages) > 0 and messages[0].role == MessageRole.SYSTEM: + assert messages[0].content is not None system_message = messages[0].content.split( DEFAULT_INTRO_HISTORY_MESSAGE )[0] diff --git a/llama-index-core/llama_index/core/memory/types.py b/llama-index-core/llama_index/core/memory/types.py index 8f7d318e4f491..c9ee2f6396183 100644 --- a/llama-index-core/llama_index/core/memory/types.py +++ b/llama-index-core/llama_index/core/memory/types.py @@ -25,8 +25,7 @@ def class_name(cls) -> str: @abstractmethod def from_defaults( cls, - chat_history: Optional[List[ChatMessage]] = None, - llm: Optional[LLM] = None, + **kwargs: Any, ) -> "BaseMemory": """Create a chat memory from defaults.""" @@ -66,7 +65,7 @@ class BaseChatStoreMemory(BaseMemory): chat_store_key: str = Field(default=DEFAULT_CHAT_STORE_KEY) @field_serializer("chat_store") - def serialize_courses_in_order(chat_store: BaseChatStore): + def serialize_courses_in_order(self, chat_store: BaseChatStore) -> dict: res = chat_store.model_dump() res.update({"class_name": chat_store.class_name()}) return res @@ -82,6 +81,7 @@ def from_defaults( cls, chat_history: Optional[List[ChatMessage]] = None, llm: Optional[LLM] = None, + **kwargs: Any, ) -> "BaseChatStoreMemory": """Create a chat memory from defaults.""" diff --git a/llama-index-core/llama_index/core/memory/utils.py b/llama-index-core/llama_index/core/memory/utils.py deleted file mode 100644 index 7aa5e2a459df2..0000000000000 --- a/llama-index-core/llama_index/core/memory/utils.py +++ /dev/null @@ -1,78 +0,0 @@ -from typing import Any, AsyncGenerator, Callable, Generator, Union - -from llama_index.core.llms import ChatResponseGen, ChatResponseAsyncGen, ChatResponse -from llama_index.core.memory import BaseMemory - - -def stream_and_write( - memory: BaseMemory, - stream: ChatResponseGen, - condition_fn: Callable[[ChatResponse], bool], - process_fn: Callable[[Any], Any] = lambda x: x, -) -> Generator[Union[bool, ChatResponse], None, None]: - """ - Exposes a streaming generator while writing the result to memory. - - First yields the condition met status, then yields the processed chunk, - and finally yields the full response. 
- """ - full_response = None - condition_met = False - condition_yielded = False - - for chunk in stream: - if not condition_met: - condition_met = condition_fn(chunk) - if not condition_yielded: - yield condition_met - condition_yielded = True - if condition_met: - full_response = chunk - break - - if not condition_met: - yield process_fn(chunk) - - full_response = chunk - - if full_response: - memory.put(full_response) - - yield full_response - - -async def astream_and_write( - memory: BaseMemory, - stream: ChatResponseAsyncGen, - condition_fn: Callable[[ChatResponse], bool], - process_fn: Callable[[Any], Any] = lambda x: x, -) -> AsyncGenerator[Union[bool, ChatResponse], None]: - """ - Exposes an async streaming generator while writing the result to memory. - - First yields the condition met status, then yields the processed chunk, - and finally yields the full response. - """ - full_response = None - condition_met = False - condition_yielded = False - - async for chunk in stream: - if not condition_met: - condition_met = condition_fn(chunk) - if not condition_yielded: - yield condition_met - condition_yielded = True - if condition_met: - full_response = chunk - break - - if not condition_met: - yield process_fn(chunk) - - full_response = chunk - - if full_response: - memory.put(full_response) - - yield full_response diff --git a/llama-index-core/llama_index/core/memory/vector_memory.py b/llama-index-core/llama_index/core/memory/vector_memory.py index 58353a765865a..eead91052d8e1 100644 --- a/llama-index-core/llama_index/core/memory/vector_memory.py +++ b/llama-index-core/llama_index/core/memory/vector_memory.py @@ -5,18 +5,18 @@ """ import uuid -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from llama_index.core.bridge.pydantic import field_validator from llama_index.core.schema import TextNode -from llama_index.core.vector_stores.types import VectorStore +from llama_index.core.vector_stores.types import BasePydanticVectorStore from llama_index.core.base.llms.types import ChatMessage, MessageRole from llama_index.core.bridge.pydantic import Field from llama_index.core.memory.types import BaseMemory from llama_index.core.embeddings.utils import EmbedType -def _stringify_obj(d: Any): +def _stringify_obj(d: Any) -> Union[str, list, dict]: """Utility function to convert all keys in a dictionary to strings.""" if isinstance(d, list): return [_stringify_obj(v) for v in d] @@ -90,15 +90,16 @@ def class_name(cls) -> str: @classmethod def from_defaults( cls, - vector_store: Optional[VectorStore] = None, + vector_store: Optional[BasePydanticVectorStore] = None, embed_model: Optional[EmbedType] = None, index_kwargs: Optional[Dict] = None, retriever_kwargs: Optional[Dict] = None, + **kwargs: Any, ) -> "VectorMemory": """Create vector memory. Args: - vector_store (Optional[VectorStore]): vector store (note: delete_nodes must + vector_store (Optional[BasePydanticVectorStore]): vector store (note: delete_nodes must be implemented. At time of writing (May 2024), Chroma, Qdrant and SimpleVectorStore all support delete_nodes. 
embed_model (Optional[EmbedType]): embedding model @@ -108,6 +109,9 @@ def from_defaults( """ from llama_index.core.indices.vector_store import VectorStoreIndex + if kwargs: + raise ValueError(f"Unexpected kwargs: {kwargs}") + index_kwargs = index_kwargs or {} retriever_kwargs = retriever_kwargs or {} diff --git a/llama-index-core/llama_index/core/multi_modal_llms/base.py b/llama-index-core/llama_index/core/multi_modal_llms/base.py index fe0b61f5f2be8..49daf9127ab7c 100644 --- a/llama-index-core/llama_index/core/multi_modal_llms/base.py +++ b/llama-index-core/llama_index/core/multi_modal_llms/base.py @@ -30,7 +30,7 @@ ) from llama_index.core.instrumentation import DispatcherSpanMixin from llama_index.core.llms.callbacks import llm_completion_callback, llm_chat_callback -from llama_index.core.schema import BaseComponent, ImageDocument +from llama_index.core.schema import BaseComponent, ImageNode class MultiModalLLMMetadata(BaseModel): @@ -93,13 +93,13 @@ def metadata(self) -> MultiModalLLMMetadata: @abstractmethod def complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: List[ImageNode], **kwargs: Any ) -> CompletionResponse: """Completion endpoint for Multi-Modal LLM.""" @abstractmethod def stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: List[ImageNode], **kwargs: Any ) -> CompletionResponseGen: """Streaming completion endpoint for Multi-Modal LLM.""" @@ -123,13 +123,13 @@ def stream_chat( @abstractmethod async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: List[ImageNode], **kwargs: Any ) -> CompletionResponse: """Async completion endpoint for Multi-Modal LLM.""" @abstractmethod async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: List[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: """Async streaming completion endpoint for Multi-Modal LLM.""" @@ -157,7 +157,7 @@ def _as_query_component(self, **kwargs: Any) -> QueryComponent: else: return MultiModalCompleteComponent(multi_modal_llm=self, **kwargs) - def __init_subclass__(cls, **kwargs) -> None: + def __init_subclass__(cls, **kwargs: Any) -> None: """ The callback decorators installs events, so they must be applied before the span decorators, otherwise the spans wouldn't contain the events. @@ -213,9 +213,9 @@ def _validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]: if not isinstance(input["image_documents"], list): raise ValueError("image_documents must be a list.") for doc in input["image_documents"]: - if not isinstance(doc, ImageDocument): + if not isinstance(doc, ImageNode): raise ValueError( - "image_documents must be a list of ImageDocument objects." + "image_documents must be a list of ImageNode objects." ) return input @@ -225,6 +225,8 @@ def _run_component(self, **kwargs: Any) -> Any: # TODO: support only complete for now prompt = kwargs["prompt"] image_documents = kwargs.get("image_documents", []) + + response: Any if self.streaming: response = self.multi_modal_llm.stream_complete(prompt, image_documents) else: @@ -237,6 +239,8 @@ async def _arun_component(self, **kwargs: Any) -> Any: # non-trivial to figure how to support chat/complete/etc. 
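The abstract endpoints above now accept `ImageNode` rather than `ImageDocument`. A small sketch of building the new input type, plus the membership check the query component performs; the URL is a placeholder:

```python
from llama_index.core.schema import ImageNode

# Callers now build ImageNode inputs for complete()/acomplete() and friends.
images = [ImageNode(image_url="https://example.com/chart.png")]

# Mirrors the component-level validation shown above.
for doc in images:
    if not isinstance(doc, ImageNode):
        raise ValueError("image_documents must be a list of ImageNode objects.")
print(f"{len(images)} valid image input(s)")
```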
prompt = kwargs["prompt"] image_documents = kwargs.get("image_documents", []) + + response: Any if self.streaming: response = await self.multi_modal_llm.astream_complete( prompt, image_documents diff --git a/llama-index-core/llama_index/core/node_parser/interface.py b/llama-index-core/llama_index/core/node_parser/interface.py index 2d3fedf98c8fe..16bead0c1fece 100644 --- a/llama-index-core/llama_index/core/node_parser/interface.py +++ b/llama-index-core/llama_index/core/node_parser/interface.py @@ -21,6 +21,7 @@ Document, MetadataMode, NodeRelationship, + TextNode, TransformComponent, ) from llama_index.core.utils import get_tqdm_iterable @@ -85,8 +86,8 @@ def _postprocess_parsed_nodes( self, nodes: List[BaseNode], parent_doc_map: Dict[str, Document] ) -> List[BaseNode]: for i, node in enumerate(nodes): - parent_doc = parent_doc_map.get(node.ref_doc_id, None) - parent_node = node.relationships.get(NodeRelationship.SOURCE, None) + parent_doc = parent_doc_map.get(node.ref_doc_id or "", None) + parent_node = node.source_node if parent_doc is not None: if parent_doc.source_node is not None: @@ -100,7 +101,7 @@ def _postprocess_parsed_nodes( ) # update start/end char idx - if start_char_idx >= 0: + if start_char_idx >= 0 and isinstance(node, TextNode): node.start_char_idx = start_char_idx node.end_char_idx = start_char_idx + len( node.get_content(metadata_mode=MetadataMode.NONE) @@ -126,7 +127,7 @@ def _postprocess_parsed_nodes( i > 0 and node.source_node and nodes[i - 1].source_node - and nodes[i - 1].source_node.node_id == node.source_node.node_id + and nodes[i - 1].source_node.node_id == node.source_node.node_id # type: ignore ): node.relationships[NodeRelationship.PREVIOUS] = nodes[ i - 1 @@ -135,7 +136,7 @@ def _postprocess_parsed_nodes( i < len(nodes) - 1 and node.source_node and nodes[i + 1].source_node - and nodes[i + 1].source_node.node_id == node.source_node.node_id + and nodes[i + 1].source_node.node_id == node.source_node.node_id # type: ignore ): node.relationships[NodeRelationship.NEXT] = nodes[ i + 1 @@ -188,11 +189,11 @@ async def aget_nodes_from_documents( return nodes - def __call__(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: - return self.get_nodes_from_documents(nodes, **kwargs) + def __call__(self, nodes: Sequence[BaseNode], **kwargs: Any) -> List[BaseNode]: + return self.get_nodes_from_documents(nodes, **kwargs) # type: ignore - async def acall(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: - return await self.aget_nodes_from_documents(nodes, **kwargs) + async def acall(self, nodes: Sequence[BaseNode], **kwargs: Any) -> List[BaseNode]: + return await self.aget_nodes_from_documents(nodes, **kwargs) # type: ignore class TextSplitter(NodeParser): diff --git a/llama-index-core/llama_index/core/node_parser/relational/base_element.py b/llama-index-core/llama_index/core/node_parser/relational/base_element.py index bba0e38351d95..bae8e5b0679a0 100644 --- a/llama-index-core/llama_index/core/node_parser/relational/base_element.py +++ b/llama-index-core/llama_index/core/node_parser/relational/base_element.py @@ -376,6 +376,8 @@ def get_nodes_from_elements( # attempt to find start_char_idx for table # raw table string regardless if perfect or not is stored in element.element + start_char_idx: Optional[int] = None + end_char_idx: Optional[int] = None if ref_doc_text: start_char_idx = ref_doc_text.find(str(element.element)) if start_char_idx >= 0: @@ -383,10 +385,6 @@ def get_nodes_from_elements( else: start_char_idx = None end_char_idx = None - else: 
- start_char_idx = None - end_char_idx = None - # shared index_id and node_id node_id = str(uuid.uuid4()) index_node = IndexNode( @@ -442,14 +440,14 @@ def get_nodes_from_elements( node.excluded_llm_metadata_keys = ( node_inherited.excluded_llm_metadata_keys ) - return [node for node in nodes if len(node.text) > 0] + return [node for node in nodes if len(node.get_content()) > 0] - def __call__(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: - nodes = self.get_nodes_from_documents(nodes, **kwargs) + def __call__(self, nodes: Sequence[BaseNode], **kwargs: Any) -> List[BaseNode]: + nodes = self.get_nodes_from_documents(nodes, **kwargs) # type: ignore nodes, objects = self.get_nodes_and_objects(nodes) - return nodes + objects + return nodes + objects # type: ignore - async def acall(self, nodes: List[BaseNode], **kwargs: Any) -> List[BaseNode]: - nodes = await self.aget_nodes_from_documents(nodes, **kwargs) + async def acall(self, nodes: Sequence[BaseNode], **kwargs: Any) -> List[BaseNode]: + nodes = await self.aget_nodes_from_documents(nodes, **kwargs) # type: ignore nodes, objects = self.get_nodes_and_objects(nodes) - return nodes + objects + return nodes + objects # type: ignore diff --git a/llama-index-core/llama_index/core/node_parser/relational/hierarchical.py b/llama-index-core/llama_index/core/node_parser/relational/hierarchical.py index 95b1683292f7e..96ddb0f07ab06 100644 --- a/llama-index-core/llama_index/core/node_parser/relational/hierarchical.py +++ b/llama-index-core/llama_index/core/node_parser/relational/hierarchical.py @@ -13,7 +13,7 @@ def _add_parent_child_relationship(parent_node: BaseNode, child_node: BaseNode) -> None: """Add parent/child relationship between nodes.""" - child_list = parent_node.relationships.get(NodeRelationship.CHILD, []) + child_list = parent_node.child_nodes or [] child_list.append(child_node.as_related_node_info()) parent_node.relationships[NodeRelationship.CHILD] = child_list @@ -47,9 +47,7 @@ def get_child_nodes(nodes: List[BaseNode], all_nodes: List[BaseNode]) -> List[Ba if NodeRelationship.CHILD not in node.relationships: continue - children_ids.extend( - [r.node_id for r in node.relationships[NodeRelationship.CHILD]] - ) + children_ids.extend([r.node_id for r in (node.child_nodes or [])]) child_nodes = [] for candidate_node in all_nodes: diff --git a/llama-index-core/llama_index/core/node_parser/relational/llama_parse_json_element.py b/llama-index-core/llama_index/core/node_parser/relational/llama_parse_json_element.py index c7613cc6b998c..88a6f5a3d2f60 100644 --- a/llama-index-core/llama_index/core/node_parser/relational/llama_parse_json_element.py +++ b/llama-index-core/llama_index/core/node_parser/relational/llama_parse_json_element.py @@ -75,7 +75,7 @@ def extract_elements( """ elements: List[Element] = [] currentElement = None - page_number = node_metadata.get("page") + page_number = node_metadata.get("page") if node_metadata is not None else 0 if mode == "json" and node_metadata is not None: json_items = node_metadata.get("items") or [] @@ -202,6 +202,8 @@ def extract_elements( for idx, element in enumerate(elements): if element.type == "table": + assert element.markdown is not None + should_keep = True perfect_table = True @@ -224,6 +226,7 @@ def extract_elements( # if the element is a table, convert it to a dataframe if should_keep: if perfect_table: + assert element.markdown is not None table = md_to_df(element.markdown) elements[idx] = Element( diff --git 
a/llama-index-core/llama_index/core/node_parser/text/semantic_double_merging_splitter.py b/llama-index-core/llama_index/core/node_parser/text/semantic_double_merging_splitter.py index b96f444900002..c69575161590e 100644 --- a/llama-index-core/llama_index/core/node_parser/text/semantic_double_merging_splitter.py +++ b/llama-index-core/llama_index/core/node_parser/text/semantic_double_merging_splitter.py @@ -58,8 +58,8 @@ def load_model(self) -> None: raise ImportError( "Spacy is not installed, please install it with `pip install spacy`." ) - self.nlp = spacy.load(self.spacy_model) - self.stopwords = set(stopwords.words(self.language)) + self.nlp = spacy.load(self.spacy_model) # type: ignore + self.stopwords = set(stopwords.words(self.language)) # type: ignore class SemanticDoubleMergingSplitterNodeParser(NodeParser): @@ -215,6 +215,9 @@ def _create_initial_chunks(self, sentences: List[str]) -> List[str]: initial_chunks: List[str] = [] chunk = sentences[0] # "" new = True + + assert self.language_config.nlp is not None + for sentence in sentences[1:]: if new: # check if 2 sentences got anything in common @@ -273,6 +276,8 @@ def _merge_initial_chunks(self, initial_chunks: List[str]) -> List[str]: skip = 0 current = initial_chunks[0] + assert self.language_config.nlp is not None + # TODO avoid connecting 1st chunk with 3rd if 2nd one is above some value, or if its length is above some value for i in range(1, len(initial_chunks)): diff --git a/llama-index-core/llama_index/core/node_parser/text/sentence.py b/llama-index-core/llama_index/core/node_parser/text/sentence.py index 45d1cc3c85738..85875de59187f 100644 --- a/llama-index-core/llama_index/core/node_parser/text/sentence.py +++ b/llama-index-core/llama_index/core/node_parser/text/sentence.py @@ -55,7 +55,7 @@ class SentenceSplitter(MetadataAwareTextSplitter): paragraph_separator: str = Field( default=DEFAULT_PARAGRAPH_SEP, description="Separator between paragraphs." ) - secondary_chunking_regex: str = Field( + secondary_chunking_regex: Optional[str] = Field( default=CHUNKING_REGEX, description="Backup regex for splitting into sentences." 
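With `secondary_chunking_regex` now `Optional` (its constructor handling appears just below), passing `None` opts out of regex-based sub-sentence splitting entirely, leaving only the separator and character-level fallbacks. A quick sketch:

```python
from llama_index.core.node_parser import SentenceSplitter

# Default behavior: the regex-based sub-sentence splitter stays enabled.
default_splitter = SentenceSplitter(chunk_size=64, chunk_overlap=8)

# Opting out: only split_by_sep and split_by_char remain as fallbacks.
no_regex_splitter = SentenceSplitter(
    chunk_size=64, chunk_overlap=8, secondary_chunking_regex=None
)
print(no_regex_splitter.split_text("A long sentence without helpful punctuation " * 8))
```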
) @@ -72,7 +72,7 @@ def __init__( tokenizer: Optional[Callable] = None, paragraph_separator: str = DEFAULT_PARAGRAPH_SEP, chunking_tokenizer_fn: Optional[Callable[[str], List[str]]] = None, - secondary_chunking_regex: str = CHUNKING_REGEX, + secondary_chunking_regex: Optional[str] = CHUNKING_REGEX, callback_manager: Optional[CallbackManager] = None, include_metadata: bool = True, include_prev_next_rel: bool = True, @@ -107,11 +107,17 @@ def __init__( self._chunking_tokenizer_fn, ] - self._sub_sentence_split_fns = [ - split_by_regex(secondary_chunking_regex), - split_by_sep(separator), - split_by_char(), - ] + if secondary_chunking_regex: + self._sub_sentence_split_fns = [ + split_by_regex(secondary_chunking_regex), + split_by_sep(separator), + split_by_char(), + ] + else: + self._sub_sentence_split_fns = [ + split_by_sep(separator), + split_by_char(), + ] @classmethod def from_defaults( diff --git a/llama-index-core/llama_index/core/node_parser/text/utils.py b/llama-index-core/llama_index/core/node_parser/text/utils.py index ec69fdd070411..9b740ed2a433a 100644 --- a/llama-index-core/llama_index/core/node_parser/text/utils.py +++ b/llama-index-core/llama_index/core/node_parser/text/utils.py @@ -1,5 +1,5 @@ import logging -from typing import Callable, List +from typing import Any, Callable, List from llama_index.core.node_parser.interface import TextSplitter @@ -32,7 +32,7 @@ def split_by_char() -> Callable[[str], List[str]]: return lambda text: list(text) -def split_by_sentence_tokenizer_internal(text: str, tokenizer) -> List[str]: +def split_by_sentence_tokenizer_internal(text: str, tokenizer: Any) -> List[str]: """Get the spans and then return the sentences. Using the start index of each span diff --git a/llama-index-core/llama_index/core/objects/base.py b/llama-index-core/llama_index/core/objects/base.py index 24cbf1f86b791..1a443082bcc02 100644 --- a/llama-index-core/llama_index/core/objects/base.py +++ b/llama-index-core/llama_index/core/objects/base.py @@ -22,7 +22,7 @@ BaseObjectNodeMapping, SimpleObjectNodeMapping, ) -from llama_index.core.schema import QueryBundle, QueryType, TextNode +from llama_index.core.schema import BaseNode, QueryBundle, QueryType from llama_index.core.storage.storage_context import ( DEFAULT_PERSIST_DIR, StorageContext, @@ -153,8 +153,8 @@ def from_objects( cls, objects: Sequence[OT], object_mapping: Optional[BaseObjectNodeMapping] = None, - from_node_fn: Optional[Callable[[TextNode], OT]] = None, - to_node_fn: Optional[Callable[[OT], TextNode]] = None, + from_node_fn: Optional[Callable[[BaseNode], OT]] = None, + to_node_fn: Optional[Callable[[OT], BaseNode]] = None, index_cls: Type[BaseIndex] = VectorStoreIndex, **index_kwargs: Any, ) -> "ObjectIndex": @@ -178,8 +178,8 @@ def from_objects_and_index( objects: Sequence[OT], index: BaseIndex, object_mapping: Optional[BaseObjectNodeMapping] = None, - from_node_fn: Optional[Callable[[TextNode], OT]] = None, - to_node_fn: Optional[Callable[[OT], TextNode]] = None, + from_node_fn: Optional[Callable[[BaseNode], OT]] = None, + to_node_fn: Optional[Callable[[OT], BaseNode]] = None, ) -> "ObjectIndex": from llama_index.core.objects.utils import get_object_mapping diff --git a/llama-index-core/llama_index/core/objects/base_node_mapping.py b/llama-index-core/llama_index/core/objects/base_node_mapping.py index eb4bfab6f818e..66e5ee6857380 100644 --- a/llama-index-core/llama_index/core/objects/base_node_mapping.py +++ b/llama-index-core/llama_index/core/objects/base_node_mapping.py @@ -57,10 +57,10 @@ def 
_add_object(self, obj: OT) -> None: """ @abstractmethod - def to_node(self, obj: OT) -> TextNode: + def to_node(self, obj: OT) -> BaseNode: """To node.""" - def to_nodes(self, objs: Sequence[OT]) -> Sequence[TextNode]: + def to_nodes(self, objs: Sequence[OT]) -> Sequence[BaseNode]: return [self.to_node(obj) for obj in objs] def from_node(self, node: BaseNode) -> OT: @@ -90,7 +90,7 @@ def from_persist_dir( """Load from serialization.""" obj_node_mapping = None errors = [] - for cls in BaseObjectNodeMapping.__subclasses__(): # type: ignore[misc] + for cls in BaseObjectNodeMapping.__subclasses__(): # type: ignore try: obj_node_mapping = cls.from_persist_dir( persist_dir=persist_dir, diff --git a/llama-index-core/llama_index/core/objects/fn_node_mapping.py b/llama-index-core/llama_index/core/objects/fn_node_mapping.py index 41dd46ac682bd..65a041e06c66f 100644 --- a/llama-index-core/llama_index/core/objects/fn_node_mapping.py +++ b/llama-index-core/llama_index/core/objects/fn_node_mapping.py @@ -7,7 +7,7 @@ DEFAULT_PERSIST_FNAME, BaseObjectNodeMapping, ) -from llama_index.core.schema import TextNode +from llama_index.core.schema import BaseNode class FnNodeMapping(BaseObjectNodeMapping[Any]): @@ -15,18 +15,18 @@ class FnNodeMapping(BaseObjectNodeMapping[Any]): def __init__( self, - from_node_fn: Callable[[TextNode], Any], - to_node_fn: Callable[[Any], TextNode], + from_node_fn: Callable[[BaseNode], Any], + to_node_fn: Callable[[Any], BaseNode], ) -> None: self._to_node_fn = to_node_fn self._from_node_fn = from_node_fn @classmethod - def from_objects( + def from_objects( # type: ignore cls, objs: Sequence[Any], - from_node_fn: Callable[[TextNode], Any], - to_node_fn: Callable[[Any], TextNode], + from_node_fn: Callable[[BaseNode], Any], + to_node_fn: Callable[[Any], BaseNode], *args: Any, **kwargs: Any, ) -> "BaseObjectNodeMapping": @@ -36,11 +36,11 @@ def from_objects( def _add_object(self, obj: Any) -> None: """Add object. 
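To make the `TextNode` to `BaseNode` widening concrete, a sketch of a custom object mapping; the `Song` dataclass and both converter functions are illustrative:

```python
from dataclasses import dataclass

from llama_index.core.objects.fn_node_mapping import FnNodeMapping
from llama_index.core.schema import BaseNode, TextNode


@dataclass
class Song:
    title: str


def to_node(obj: Song) -> BaseNode:
    # Any BaseNode subtype is now acceptable, not just TextNode.
    return TextNode(text=obj.title)


def from_node(node: BaseNode) -> Song:
    return Song(title=node.get_content())


mapping = FnNodeMapping.from_objects([Song("Blue in Green")], from_node, to_node)
print(mapping.to_node(Song("So What")).get_content())  # So What
```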
NOTE: unused.""" - def to_node(self, obj: Any) -> TextNode: + def to_node(self, obj: Any) -> BaseNode: """To node.""" return self._to_node_fn(obj) - def _from_node(self, node: TextNode) -> Any: + def _from_node(self, node: BaseNode) -> Any: """From node.""" return self._from_node_fn(node) diff --git a/llama-index-core/llama_index/core/objects/utils.py b/llama-index-core/llama_index/core/objects/utils.py index eb83e77609c52..d4f91b2a71d69 100644 --- a/llama-index-core/llama_index/core/objects/utils.py +++ b/llama-index-core/llama_index/core/objects/utils.py @@ -5,13 +5,13 @@ from llama_index.core.objects.base_node_mapping import BaseObjectNodeMapping from llama_index.core.objects.fn_node_mapping import FnNodeMapping from llama_index.core.objects.tool_node_mapping import SimpleToolNodeMapping -from llama_index.core.schema import TextNode +from llama_index.core.schema import BaseNode def get_object_mapping( objects: Sequence[Any], - from_node_fn: Optional[Callable[[TextNode], Any]] = None, - to_node_fn: Optional[Callable[[Any], TextNode]] = None, + from_node_fn: Optional[Callable[[BaseNode], Any]] = None, + to_node_fn: Optional[Callable[[Any], BaseNode]] = None, ) -> BaseObjectNodeMapping: """Get object mapping according to object.""" if from_node_fn is not None and to_node_fn is not None: diff --git a/llama-index-core/llama_index/core/output_parsers/pydantic.py b/llama-index-core/llama_index/core/output_parsers/pydantic.py index 7487352e03d4b..faf98ee351ba0 100644 --- a/llama-index-core/llama_index/core/output_parsers/pydantic.py +++ b/llama-index-core/llama_index/core/output_parsers/pydantic.py @@ -36,7 +36,7 @@ def __init__( @property def output_cls(self) -> Type[Model]: - return self._output_cls + return self._output_cls # type: ignore @property def format_string(self) -> str: diff --git a/llama-index-core/llama_index/core/postprocessor/node.py b/llama-index-core/llama_index/core/postprocessor/node.py index 41397ba743066..a1242437f6ac1 100644 --- a/llama-index-core/llama_index/core/postprocessor/node.py +++ b/llama-index-core/llama_index/core/postprocessor/node.py @@ -70,7 +70,7 @@ def _postprocess_nodes( class SimilarityPostprocessor(BaseNodePostprocessor): """Similarity-based Node processor.""" - similarity_cutoff: float = Field(default=None) + similarity_cutoff: float = Field(default=0.0) @classmethod def class_name(cls) -> str: diff --git a/llama-index-core/llama_index/core/postprocessor/rankGPT_rerank.py b/llama-index-core/llama_index/core/postprocessor/rankGPT_rerank.py index 5c2ff3a973b2b..65d3e9c5bfbbc 100644 --- a/llama-index-core/llama_index/core/postprocessor/rankGPT_rerank.py +++ b/llama-index-core/llama_index/core/postprocessor/rankGPT_rerank.py @@ -76,6 +76,9 @@ async def _apostprocess_nodes( nodes: List[NodeWithScore], query_bundle: Optional[QueryBundle] = None, ) -> List[NodeWithScore]: + if query_bundle is None: + raise ValueError("Query bundle must be provided.") + items = { "query": query_bundle.query_str, "hits": [{"content": node.get_content()} for node in nodes], diff --git a/llama-index-core/llama_index/core/program/function_program.py b/llama-index-core/llama_index/core/program/function_program.py index 80af54b07687a..df008ce3b7b34 100644 --- a/llama-index-core/llama_index/core/program/function_program.py +++ b/llama-index-core/llama_index/core/program/function_program.py @@ -19,9 +19,9 @@ ValidationError, ConfigDict, ) -from llama_index.core.llms.llm import LLM from llama_index.core.base.llms.types import ChatResponse from llama_index.core.llms.function_calling 
import FunctionCallingLLM +from llama_index.core.llms.llm import LLM from llama_index.core.prompts.base import BasePromptTemplate, PromptTemplate from llama_index.core.settings import Settings from llama_index.core.types import BasePydanticProgram, Model @@ -34,7 +34,7 @@ def _parse_tool_outputs( agent_response: AgentChatResponse, allow_parallel_tool_calls: bool = False, -) -> List[BaseModel]: +) -> Union[BaseModel, List[BaseModel]]: """Parse tool outputs.""" outputs = [cast(BaseModel, s.raw_output) for s in agent_response.sources] if allow_parallel_tool_calls: @@ -77,11 +77,13 @@ def create_flexible_model(model: Type[BaseModel]) -> Type[FlexibleModel]: return create_model( f"Flexible{model.__name__}", __base__=FlexibleModel, - **{field: (Optional[Any], None) for field in model.__fields__}, - ) + **{field: (Optional[Any], None) for field in model.model_fields}, + ) # type: ignore -def num_valid_fields(obj: BaseModel) -> int: +def num_valid_fields( + obj: Union[BaseModel, List[BaseModel], Dict[str, BaseModel]] +) -> int: """ Recursively count the number of fields in a Pydantic object (including nested objects) that aren't None. @@ -122,9 +124,9 @@ class FunctionCallingProgram(BasePydanticProgram[BaseModel]): def __init__( self, output_cls: Type[Model], - llm: LLM, + llm: FunctionCallingLLM, prompt: BasePromptTemplate, - tool_choice: Union[str, Dict[str, Any]], + tool_choice: Optional[Union[str, Dict[str, Any]]] = None, allow_parallel_tool_calls: bool = False, verbose: bool = False, ) -> None: @@ -141,14 +143,16 @@ def from_defaults( cls, output_cls: Type[Model], prompt_template_str: Optional[str] = None, - prompt: Optional[PromptTemplate] = None, + prompt: Optional[BasePromptTemplate] = None, llm: Optional[LLM] = None, verbose: bool = False, allow_parallel_tool_calls: bool = False, tool_choice: Optional[Union[str, Dict[str, Any]]] = None, **kwargs: Any, ) -> "FunctionCallingProgram": - llm = llm or Settings.llm + llm = llm or Settings.llm # type: ignore + assert llm is not None + if not llm.metadata.is_function_calling_model: raise ValueError( f"Model name {llm.metadata.model_name} does not support " @@ -163,8 +167,8 @@ def from_defaults( prompt = PromptTemplate(prompt_template_str) return cls( - output_cls=output_cls, - llm=llm, + output_cls=output_cls, # type: ignore + llm=llm, # type: ignore prompt=cast(PromptTemplate, prompt), tool_choice=tool_choice, allow_parallel_tool_calls=allow_parallel_tool_calls, @@ -185,10 +189,10 @@ def prompt(self, prompt: BasePromptTemplate) -> None: def __call__( self, - llm_kwargs: Optional[Dict[str, Any]] = None, *args: Any, + llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any, - ) -> Union[Model, List[Model]]: + ) -> BaseModel: llm_kwargs = llm_kwargs or {} tool = _get_function_tool(self._output_cls) @@ -205,14 +209,14 @@ def __call__( return _parse_tool_outputs( agent_response, allow_parallel_tool_calls=self._allow_parallel_tool_calls, - ) + ) # type: ignore async def acall( self, - llm_kwargs: Optional[Dict[str, Any]] = None, *args: Any, + llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any, - ) -> Union[Model, List[Model]]: + ) -> BaseModel: llm_kwargs = llm_kwargs or {} tool = _get_function_tool(self._output_cls) @@ -226,7 +230,7 @@ async def acall( return _parse_tool_outputs( agent_response, allow_parallel_tool_calls=self._allow_parallel_tool_calls, - ) + ) # type: ignore def _process_objects( self, @@ -243,7 +247,7 @@ def _process_objects( # TODO: change if len(tool_calls) == 0: # if no tool calls, return single blank output_class - 
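For context on `create_flexible_model` above (now iterating `model.model_fields` for Pydantic v2): it rebuilds the target schema with every field optional, which is what lets half-streamed tool-call payloads validate mid-stream. A standalone mirror of that recipe:

```python
from typing import Any, Optional

from llama_index.core.bridge.pydantic import BaseModel, create_model


class Album(BaseModel):
    title: str
    year: int


# Same trick as create_flexible_model: identical field names, all optional,
# None defaults (base class omitted here for brevity).
FlexibleAlbum = create_model(
    "FlexibleAlbum",
    **{name: (Optional[Any], None) for name in Album.model_fields},
)

# A partial payload, e.g. from a half-streamed tool call, now validates.
print(FlexibleAlbum(title="Kind of Blue"))  # title='Kind of Blue' year=None
```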
return output_cls() + return output_cls() # type: ignore tool_fn_args = [call.tool_kwargs for call in tool_calls] objects = [ @@ -263,18 +267,18 @@ def _process_objects( new_obj = self._output_cls.model_validate(obj.model_dump()) except ValidationError as e: _logger.warning(f"Failed to parse object: {e}") - new_obj = obj + new_obj = obj # type: ignore new_cur_objects.append(new_obj) if self._allow_parallel_tool_calls: - return new_cur_objects + return new_cur_objects # type: ignore else: if len(new_cur_objects) > 1: _logger.warning( "Multiple outputs found, returning first one. " "If you want to return all outputs, set output_multiple=True." ) - return new_cur_objects[0] + return new_cur_objects[0] # type: ignore def stream_call( self, *args: Any, llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any @@ -308,7 +312,7 @@ def stream_call( partial_output_cls = create_flexible_model(self._output_cls) cur_objects = None for partial_resp in chat_response_gen: - objects = self._process_objects( + objects: Union[Model, List[Model]] = self._process_objects( partial_resp, partial_output_cls, cur_objects=cur_objects ) cur_objects = objects if isinstance(objects, list) else [objects] @@ -323,7 +327,6 @@ async def astream_call( or a list of objects until it returns. """ - llm_kwargs = llm_kwargs or {} async def gen() -> AsyncGenerator[Union[Model, List[Model]], None]: # TODO: we can extend this to non-function calling LLMs as well, coming soon @@ -340,7 +343,7 @@ async def gen() -> AsyncGenerator[Union[Model, List[Model]], None]: chat_history=messages, verbose=self._verbose, allow_parallel_tool_calls=self._allow_parallel_tool_calls, - **llm_kwargs, + **(llm_kwargs or {}), ) # NOTE: create a new class that treats all its fields as optional # inspired by instructor @@ -348,7 +351,7 @@ async def gen() -> AsyncGenerator[Union[Model, List[Model]], None]: partial_output_cls = create_flexible_model(self._output_cls) cur_objects = None async for partial_resp in chat_response_gen: - objects = self._process_objects( + objects: Union[Union[Model, List[Model]]] = self._process_objects( partial_resp, partial_output_cls, cur_objects=cur_objects ) cur_objects = objects if isinstance(objects, list) else [objects] diff --git a/llama-index-core/llama_index/core/program/llm_program.py b/llama-index-core/llama_index/core/program/llm_program.py index 608cf5917b693..dc73b801f7a24 100644 --- a/llama-index-core/llama_index/core/program/llm_program.py +++ b/llama-index-core/llama_index/core/program/llm_program.py @@ -38,7 +38,7 @@ def from_defaults( output_parser: Optional[BaseOutputParser] = None, output_cls: Optional[Type[BaseModel]] = None, prompt_template_str: Optional[str] = None, - prompt: Optional[PromptTemplate] = None, + prompt: Optional[BasePromptTemplate] = None, llm: Optional[LLM] = None, verbose: bool = False, **kwargs: Any, @@ -90,9 +90,9 @@ def __call__( if self._llm.metadata.is_chat_model: messages = self._prompt.format_messages(llm=self._llm, **kwargs) messages = self._llm._extend_messages(messages) - response = self._llm.chat(messages, **llm_kwargs) + chat_response = self._llm.chat(messages, **llm_kwargs) - raw_output = response.message.content or "" + raw_output = chat_response.message.content or "" else: formatted_prompt = self._prompt.format(llm=self._llm, **kwargs) @@ -117,9 +117,9 @@ async def acall( if self._llm.metadata.is_chat_model: messages = self._prompt.format_messages(llm=self._llm, **kwargs) messages = self._llm._extend_messages(messages) - response = await self._llm.achat(messages, 
**llm_kwargs) + chat_response = await self._llm.achat(messages, **llm_kwargs) - raw_output = response.message.content or "" + raw_output = chat_response.message.content or "" else: formatted_prompt = self._prompt.format(llm=self._llm, **kwargs) diff --git a/llama-index-core/llama_index/core/program/multi_modal_llm_program.py b/llama-index-core/llama_index/core/program/multi_modal_llm_program.py index f7b2524fbb7a8..43206e89c0b96 100644 --- a/llama-index-core/llama_index/core/program/multi_modal_llm_program.py +++ b/llama-index-core/llama_index/core/program/multi_modal_llm_program.py @@ -1,10 +1,10 @@ -from typing import Any, Dict, Optional, Sequence, Type, cast +from typing import Any, Dict, List, Optional, Type, cast from llama_index.core.bridge.pydantic import BaseModel from llama_index.core.multi_modal_llms import MultiModalLLM from llama_index.core.output_parsers.pydantic import PydanticOutputParser from llama_index.core.prompts.base import BasePromptTemplate, PromptTemplate -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode from llama_index.core.types import BasePydanticProgram from llama_index.core.utils import print_text @@ -22,7 +22,7 @@ def __init__( output_parser: PydanticOutputParser, prompt: BasePromptTemplate, multi_modal_llm: MultiModalLLM, - image_documents: Sequence[ImageDocument], + image_documents: List[ImageNode], verbose: bool = False, ) -> None: self._output_parser = output_parser @@ -41,7 +41,7 @@ def from_defaults( prompt_template_str: Optional[str] = None, prompt: Optional[PromptTemplate] = None, multi_modal_llm: Optional[MultiModalLLM] = None, - image_documents: Optional[Sequence[ImageDocument]] = None, + image_documents: Optional[List[ImageNode]] = None, verbose: bool = False, **kwargs: Any, ) -> "MultiModalLLMCompletionProgram": @@ -94,12 +94,12 @@ def prompt(self, prompt: BasePromptTemplate) -> None: def __call__( self, llm_kwargs: Optional[Dict[str, Any]] = None, - image_documents: Optional[Sequence[ImageDocument]] = None, + image_documents: Optional[List[ImageNode]] = None, *args: Any, **kwargs: Any, ) -> BaseModel: llm_kwargs = llm_kwargs or {} - formatted_prompt = self._prompt.format(llm=self._multi_modal_llm, **kwargs) + formatted_prompt = self._prompt.format(llm=self._multi_modal_llm, **kwargs) # type: ignore response = self._multi_modal_llm.complete( formatted_prompt, @@ -116,12 +116,12 @@ def __call__( async def acall( self, llm_kwargs: Optional[Dict[str, Any]] = None, - image_documents: Optional[Sequence[ImageDocument]] = None, + image_documents: Optional[List[ImageNode]] = None, *args: Any, **kwargs: Any, ) -> BaseModel: llm_kwargs = llm_kwargs or {} - formatted_prompt = self._prompt.format(llm=self._multi_modal_llm, **kwargs) + formatted_prompt = self._prompt.format(llm=self._multi_modal_llm, **kwargs) # type: ignore response = await self._multi_modal_llm.acomplete( formatted_prompt, diff --git a/llama-index-core/llama_index/core/program/utils.py b/llama-index-core/llama_index/core/program/utils.py index d1af79f20675c..9bab1a3a43765 100644 --- a/llama-index-core/llama_index/core/program/utils.py +++ b/llama-index-core/llama_index/core/program/utils.py @@ -5,7 +5,7 @@ from llama_index.core.bridge.pydantic import BaseModel, Field, create_model from llama_index.core.llms.llm import LLM from llama_index.core.output_parsers.pydantic import PydanticOutputParser -from llama_index.core.prompts.base import PromptTemplate +from llama_index.core.prompts.base import BasePromptTemplate from llama_index.core.types 
import BasePydanticProgram, PydanticProgramMode @@ -33,7 +33,7 @@ def create_list_model(base_cls: Type[BaseModel]) -> Type[BaseModel]: def get_program_for_llm( output_cls: BaseModel, - prompt: PromptTemplate, + prompt: BasePromptTemplate, llm: LLM, pydantic_program_mode: PydanticProgramMode = PydanticProgramMode.DEFAULT, **kwargs: Any, @@ -44,7 +44,7 @@ def get_program_for_llm( from llama_index.core.program.function_program import FunctionCallingProgram return FunctionCallingProgram.from_defaults( - output_cls=output_cls, + output_cls=output_cls, # type: ignore llm=llm, prompt=prompt, **kwargs, @@ -55,7 +55,7 @@ def get_program_for_llm( ) return LLMTextCompletionProgram.from_defaults( - output_parser=PydanticOutputParser(output_cls=output_cls), + output_parser=PydanticOutputParser(output_cls=output_cls), # type: ignore llm=llm, prompt=prompt, **kwargs, @@ -66,7 +66,7 @@ def get_program_for_llm( ) # pants: no-infer-dep return OpenAIPydanticProgram.from_defaults( - output_cls=output_cls, + output_cls=output_cls, # type: ignore llm=llm, prompt=prompt, **kwargs, @@ -75,7 +75,7 @@ def get_program_for_llm( from llama_index.core.program.function_program import FunctionCallingProgram return FunctionCallingProgram.from_defaults( - output_cls=output_cls, + output_cls=output_cls, # type: ignore llm=llm, prompt=prompt, **kwargs, @@ -85,7 +85,7 @@ def get_program_for_llm( from llama_index.core.program.llm_program import LLMTextCompletionProgram return LLMTextCompletionProgram.from_defaults( - output_parser=PydanticOutputParser(output_cls=output_cls), + output_parser=PydanticOutputParser(output_cls=output_cls), # type: ignore llm=llm, prompt=prompt, **kwargs, diff --git a/llama-index-core/llama_index/core/prompts/base.py b/llama-index-core/llama_index/core/prompts/base.py index b615585b7fb9c..80982d9f80f60 100644 --- a/llama-index-core/llama_index/core/prompts/base.py +++ b/llama-index-core/llama_index/core/prompts/base.py @@ -60,7 +60,7 @@ ] -class BasePromptTemplate(ChainableMixin, BaseModel, ABC): +class BasePromptTemplate(ChainableMixin, BaseModel, ABC): # type: ignore[no-redef] model_config = ConfigDict(arbitrary_types_allowed=True) metadata: Dict[str, Any] template_vars: List[str] @@ -146,7 +146,7 @@ def _as_query_component( return PromptComponent(prompt=self, format_messages=False, llm=llm) -class PromptTemplate(BasePromptTemplate): +class PromptTemplate(BasePromptTemplate): # type: ignore[no-redef] template: str def __init__( @@ -227,12 +227,12 @@ def get_template(self, llm: Optional[BaseLLM] = None) -> str: return self.template -class ChatPromptTemplate(BasePromptTemplate): +class ChatPromptTemplate(BasePromptTemplate): # type: ignore[no-redef] message_templates: List[ChatMessage] def __init__( self, - message_templates: List[ChatMessage], + message_templates: Sequence[ChatMessage], prompt_type: str = PromptType.CUSTOM, output_parser: Optional[BaseOutputParser] = None, metadata: Optional[Dict[str, Any]] = None, @@ -267,10 +267,10 @@ def from_messages( """From messages.""" if isinstance(message_templates[0], tuple): message_templates = [ - ChatMessage.from_str(role=role, content=content) + ChatMessage.from_str(role=role, content=content) # type: ignore[arg-type] for role, content in message_templates ] - return cls(message_templates=message_templates, **kwargs) + return cls(message_templates=message_templates, **kwargs) # type: ignore[arg-type] def partial_format(self, **kwargs: Any) -> "ChatPromptTemplate": prompt = deepcopy(self) @@ -332,17 +332,17 @@ def _as_query_component( return 
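As the `from_messages` hunk above shows, `(role, content)` tuples are still accepted and converted through `ChatMessage.from_str`. A runnable sketch:

```python
from llama_index.core.prompts import ChatPromptTemplate

template = ChatPromptTemplate.from_messages(
    [
        ("system", "You answer questions about {topic} only."),
        ("user", "{question}"),
    ]
)
# The tuples become ChatMessage objects with the given roles.
print(template.format_messages(topic="jazz", question="Who recorded 'So What'?"))
```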
PromptComponent(prompt=self, format_messages=True, llm=llm) -class SelectorPromptTemplate(BasePromptTemplate): +class SelectorPromptTemplate(BasePromptTemplate): # type: ignore[no-redef] default_template: SerializeAsAny[BasePromptTemplate] conditionals: Optional[ - List[Tuple[Callable[[BaseLLM], bool], BasePromptTemplate]] + Sequence[Tuple[Callable[[BaseLLM], bool], BasePromptTemplate]] ] = None def __init__( self, default_template: BasePromptTemplate, conditionals: Optional[ - List[Tuple[Callable[[BaseLLM], bool], BasePromptTemplate]] + Sequence[Tuple[Callable[[BaseLLM], bool], BasePromptTemplate]] ] = None, ): metadata = default_template.metadata @@ -404,7 +404,7 @@ def get_template(self, llm: Optional[BaseLLM] = None) -> str: return prompt.get_template(llm=llm) -class LangchainPromptTemplate(BasePromptTemplate): +class LangchainPromptTemplate(BasePromptTemplate): # type: ignore[no-redef] selector: Any requires_langchain_llm: bool = False diff --git a/llama-index-core/llama_index/core/query_engine/citation_query_engine.py b/llama-index-core/llama_index/core/query_engine/citation_query_engine.py index 7611b6090f7c0..5209e53dc2931 100644 --- a/llama-index-core/llama_index/core/query_engine/citation_query_engine.py +++ b/llama-index-core/llama_index/core/query_engine/citation_query_engine.py @@ -219,7 +219,7 @@ def _create_citation_nodes(self, nodes: List[NodeWithScore]) -> List[NodeWithSco new_node = NodeWithScore( node=TextNode.model_validate(node.node), score=node.score ) - new_node.node.text = text + new_node.node.set_content(text) new_nodes.append(new_node) return new_nodes diff --git a/llama-index-core/llama_index/core/query_engine/jsonalyze_query_engine.py b/llama-index-core/llama_index/core/query_engine/jsonalyze_query_engine.py index 8514179138ec3..c3aa2c92973f1 100644 --- a/llama-index-core/llama_index/core/query_engine/jsonalyze_query_engine.py +++ b/llama-index-core/llama_index/core/query_engine/jsonalyze_query_engine.py @@ -74,7 +74,7 @@ def default_jsonalyzer( db = sqlite_utils.Database(memory=True) try: # Load list of dictionaries into SQLite database - db[table_name].insert_all(list_of_dict) + db[table_name].insert_all(list_of_dict) # type: ignore except sqlite_utils.utils.sqlite3.IntegrityError as exc: print_text(f"Error inserting into table {table_name}, expected format:") print_text("[{col1: val1, col2: val2, ...}, ...]") @@ -142,7 +142,7 @@ async def async_default_jsonalyzer( db = sqlite_utils.Database(memory=True) try: # Load list of dictionaries into SQLite database - db[table_name].insert_all(list_of_dict) + db[table_name].insert_all(list_of_dict) # type: ignore except sqlite_utils.utils.sqlite3.IntegrityError as exc: print_text(f"Error inserting into table {table_name}, expected format:") print_text("[{col1: val1, col2: val2, ...}, ...]") diff --git a/llama-index-core/llama_index/core/query_engine/multi_modal.py b/llama-index-core/llama_index/core/query_engine/multi_modal.py index 3b2bb243855dd..ba48b42665b36 100644 --- a/llama-index-core/llama_index/core/query_engine/multi_modal.py +++ b/llama-index-core/llama_index/core/query_engine/multi_modal.py @@ -121,7 +121,11 @@ def synthesize( llm_response = self._multi_modal_llm.complete( prompt=fmt_prompt, - image_documents=[image_node.node for image_node in image_nodes], + image_documents=[ + image_node.node + for image_node in image_nodes + if isinstance(image_node.node, ImageNode) + ], ) return Response( response=str(llm_response), @@ -132,15 +136,19 @@ def synthesize( def _get_response_with_images( self, prompt_str: 
str, - image_nodes: List[ImageNode], + image_nodes: List[NodeWithScore], ) -> RESPONSE_TYPE: + assert all(isinstance(node.node, ImageNode) for node in image_nodes) + fmt_prompt = self._image_qa_template.format( query_str=prompt_str, ) llm_response = self._multi_modal_llm.complete( prompt=fmt_prompt, - image_documents=[image_node.node for image_node in image_nodes], + image_documents=[ + node.node for node in image_nodes if isinstance(node.node, ImageNode) + ], ) return Response( response=str(llm_response), @@ -161,9 +169,14 @@ async def asynthesize( fmt_prompt = self._text_qa_template.format( context_str=context_str, query_str=query_bundle.query_str ) + llm_response = await self._multi_modal_llm.acomplete( prompt=fmt_prompt, - image_documents=[image_node.node for image_node in image_nodes], + image_documents=[ + image_node.node + for image_node in image_nodes + if isinstance(image_node.node, ImageNode) + ], ) return Response( response=str(llm_response), diff --git a/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py b/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py index 06c3f5a2cbc1a..b2650fffb15fd 100644 --- a/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py +++ b/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py @@ -64,6 +64,7 @@ def from_args( llm: Optional[LLM] = None, response_synthesizer: Optional[BaseSynthesizer] = None, node_postprocessors: Optional[List[BaseNodePostprocessor]] = None, + callback_manager: Optional[CallbackManager] = None, # response synthesizer args response_mode: ResponseMode = ResponseMode.COMPACT, text_qa_template: Optional[BasePromptTemplate] = None, @@ -81,6 +82,7 @@ def from_args( retriever (BaseRetriever): A retriever object. node_postprocessors (Optional[List[BaseNodePostprocessor]]): A list of node postprocessors. + callback_manager (Optional[CallbackManager]): A callback manager. verbose (bool): Whether to print out debug info. response_mode (ResponseMode): A ResponseMode object. 
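A hedged sketch of the new `callback_manager` argument documented above (its `Settings` fallback appears just below); it assumes `Settings` already provides an LLM and embedding model for the index and synthesizer:

```python
from llama_index.core import VectorStoreIndex
from llama_index.core.callbacks import CallbackManager, LlamaDebugHandler
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.core.schema import TextNode

index = VectorStoreIndex(nodes=[TextNode(text="The sky is blue.")])

# An explicit manager now takes precedence; Settings.callback_manager is
# only the fallback.
engine = RetrieverQueryEngine.from_args(
    retriever=index.as_retriever(),
    callback_manager=CallbackManager([LlamaDebugHandler()]),
)
```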
text_qa_template (Optional[BasePromptTemplate]): A BasePromptTemplate @@ -108,7 +110,7 @@ def from_args( streaming=streaming, ) - callback_manager = Settings.callback_manager + callback_manager = callback_manager or Settings.callback_manager return cls( retriever=retriever, diff --git a/llama-index-core/llama_index/core/query_engine/retry_query_engine.py b/llama-index-core/llama_index/core/query_engine/retry_query_engine.py index 63f7621637dc0..94002f3318d6b 100644 --- a/llama-index-core/llama_index/core/query_engine/retry_query_engine.py +++ b/llama-index-core/llama_index/core/query_engine/retry_query_engine.py @@ -2,7 +2,11 @@ from typing import Optional from llama_index.core.base.base_query_engine import BaseQueryEngine -from llama_index.core.base.response.schema import RESPONSE_TYPE, Response +from llama_index.core.base.response.schema import ( + RESPONSE_TYPE, + Response, + AsyncStreamingResponse, +) from llama_index.core.callbacks.base import CallbackManager from llama_index.core.evaluation.base import BaseEvaluator from llama_index.core.evaluation.guideline import GuidelineEvaluator @@ -44,6 +48,7 @@ def _get_prompt_modules(self) -> PromptMixinType: def _query(self, query_bundle: QueryBundle) -> RESPONSE_TYPE: """Answer a query.""" response = self._query_engine._query(query_bundle) + assert not isinstance(response, AsyncStreamingResponse) if self.max_retries <= 0: return response typed_response = ( @@ -108,6 +113,7 @@ def _get_prompt_modules(self) -> PromptMixinType: def _query(self, query_bundle: QueryBundle) -> RESPONSE_TYPE: """Answer a query.""" response = self._query_engine._query(query_bundle) + assert not isinstance(response, AsyncStreamingResponse) if self.max_retries <= 0: return response typed_response = ( diff --git a/llama-index-core/llama_index/core/query_engine/router_query_engine.py b/llama-index-core/llama_index/core/query_engine/router_query_engine.py index ecb68e9996fb1..856bf851e3e28 100644 --- a/llama-index-core/llama_index/core/query_engine/router_query_engine.py +++ b/llama-index-core/llama_index/core/query_engine/router_query_engine.py @@ -1,5 +1,5 @@ import logging -from typing import Callable, List, Optional, Sequence, Any +from typing import Callable, Generator, List, Optional, Sequence, Any from llama_index.core.async_utils import run_async_tasks from llama_index.core.base.base_query_engine import BaseQueryEngine @@ -10,6 +10,7 @@ PydanticResponse, Response, StreamingResponse, + AsyncStreamingResponse, ) from llama_index.core.bridge.pydantic import BaseModel from llama_index.core.callbacks.base import CallbackManager @@ -42,6 +43,8 @@ def combine_responses( for response in responses: if isinstance(response, (StreamingResponse, PydanticResponse)): response_obj = response.get_response() + elif isinstance(response, AsyncStreamingResponse): + raise ValueError("AsyncStreamingResponse not supported in sync code.") else: response_obj = response source_nodes.extend(response_obj.source_nodes) @@ -53,8 +56,10 @@ def combine_responses( return Response(response=summary, source_nodes=source_nodes) elif isinstance(summary, BaseModel): return PydanticResponse(response=summary, source_nodes=source_nodes) - else: + elif isinstance(summary, Generator): return StreamingResponse(response_gen=summary, source_nodes=source_nodes) + else: + return AsyncStreamingResponse(response_gen=summary, source_nodes=source_nodes) async def acombine_responses( @@ -68,6 +73,8 @@ async def acombine_responses( for response in responses: if isinstance(response, (StreamingResponse, 
PydanticResponse)): response_obj = response.get_response() + elif isinstance(response, AsyncStreamingResponse): + response_obj = await response.get_response() else: response_obj = response source_nodes.extend(response_obj.source_nodes) @@ -79,8 +86,10 @@ async def acombine_responses( return Response(response=summary, source_nodes=source_nodes) elif isinstance(summary, BaseModel): return PydanticResponse(response=summary, source_nodes=source_nodes) - else: + elif isinstance(summary, Generator): return StreamingResponse(response_gen=summary, source_nodes=source_nodes) + else: + return AsyncStreamingResponse(response_gen=summary, source_nodes=source_nodes) class RouterQueryEngine(BaseQueryEngine): diff --git a/llama-index-core/llama_index/core/query_engine/sql_join_query_engine.py b/llama-index-core/llama_index/core/query_engine/sql_join_query_engine.py index f80959a86bf3a..896eaea7c35fe 100644 --- a/llama-index-core/llama_index/core/query_engine/sql_join_query_engine.py +++ b/llama-index-core/llama_index/core/query_engine/sql_join_query_engine.py @@ -216,7 +216,7 @@ def __init__( self._llm = llm or Settings.llm - self._selector = selector or get_selector_from_llm(self._llm, is_multi=False) + self._selector = selector or get_selector_from_llm(self._llm, is_multi=False) # type: ignore assert isinstance(self._selector, (LLMSingleSelector, PydanticSingleSelector)) self._sql_join_synthesis_prompt = ( diff --git a/llama-index-core/llama_index/core/query_pipeline/components/loop.py b/llama-index-core/llama_index/core/query_pipeline/components/loop.py index 9224fd95562f1..ebc3d7041f168 100644 --- a/llama-index-core/llama_index/core/query_pipeline/components/loop.py +++ b/llama-index-core/llama_index/core/query_pipeline/components/loop.py @@ -28,7 +28,7 @@ class LoopComponent(QueryComponent): ..., description="Add output to input function. If not provided, will reuse the original input for the next iteration. 
If provided, will call the function to combine the output into the input for the next iteration.", ) - max_iterations: Optional[int] = Field(5, description="Max iterations") + max_iterations: int = Field(default=5, description="Max iterations") def __init__( self, @@ -55,7 +55,7 @@ def _validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]: def _run_component(self, **kwargs: Any) -> Dict: """Run component.""" current_input = kwargs - for i in range(self.max_iterations): + for _ in range(self.max_iterations): output = self.pipeline.run_component(**current_input) if self.should_exit_fn: should_exit = self.should_exit_fn(output) @@ -70,7 +70,7 @@ def _run_component(self, **kwargs: Any) -> Dict: async def _arun_component(self, **kwargs: Any) -> Any: """Run component (async).""" current_input = kwargs - for i in range(self.max_iterations): + for _ in range(self.max_iterations): output = await self.pipeline.arun_component(**current_input) if self.should_exit_fn: should_exit = self.should_exit_fn(output) diff --git a/llama-index-core/llama_index/core/query_pipeline/query.py b/llama-index-core/llama_index/core/query_pipeline/query.py index 3e40dc838e444..3ba8b0563c5bc 100644 --- a/llama-index-core/llama_index/core/query_pipeline/query.py +++ b/llama-index-core/llama_index/core/query_pipeline/query.py @@ -239,9 +239,10 @@ def __init__( state: Optional[Dict[str, Any]] = None, **kwargs: Any, ): + state = state or {} super().__init__( callback_manager=callback_manager or CallbackManager([]), - state=state or {}, + state=state, **kwargs, ) @@ -450,7 +451,7 @@ def run_with_intermediates( **kwargs, ) - def merge_dicts(self, d1, d2): + def merge_dicts(self, d1: Dict[str, Any], d2: Dict[str, Any]) -> Dict[str, Any]: """Merge two dictionaries recursively, combining values of the same key into a list.""" merged = {} for key in set(d1).union(d2): @@ -458,10 +459,11 @@ def merge_dicts(self, d1, d2): if isinstance(d1[key], dict) and isinstance(d2[key], dict): merged[key] = self.merge_dicts(d1[key], d2[key]) else: - merged[key] = ( - [d1[key]] if not isinstance(d1[key], list) else d1[key] - ) - merged[key].append(d2[key]) + new_val = [d1[key]] if not isinstance(d1[key], list) else d1[key] + assert isinstance(new_val, list) + + new_val.append(d2[key]) + merged[key] = new_val # type: ignore[assignment] else: merged[key] = d1.get(key, d2.get(key)) return merged @@ -481,7 +483,7 @@ def run_multi( payload={EventPayload.QUERY_STR: json.dumps(module_input_dict)}, ) as query_event: if batch: - outputs = {} + outputs: Dict[str, Any] = {} batch_lengths = { len(values) @@ -607,7 +609,7 @@ async def arun_multi( payload={EventPayload.QUERY_STR: json.dumps(module_input_dict)}, ) as query_event: if batch: - outputs = {} + outputs: Dict[str, Any] = {} batch_lengths = { len(values) @@ -754,7 +756,7 @@ def _run( raise ValueError("Length of batch inputs must be the same.") # List of individual inputs from batch input - kwargs = [ + kwargs_list = [ dict(zip(kwargs.keys(), values)) for values in zip(*kwargs.values()) ] @@ -762,7 +764,7 @@ def _run( self._arun_multi( {root_key: kwarg}, show_intermediates=show_intermediates ) - for kwarg in kwargs + for kwarg in kwargs_list ] results = asyncio_run(run_jobs(jobs, workers=len(jobs))) @@ -773,15 +775,17 @@ def _run( ) intermediates.append(result[1]) - return result_outputs, intermediates + return result_outputs, intermediates # type: ignore[return-value] else: - result_outputs, intermediates = self._run_multi( + result_output_dicts, intermediate_dicts = 
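The `merge_dicts` rewrite above is behavior-preserving; only the intermediate variable and annotations changed. Its contract, restated standalone: nested dicts merge recursively, and colliding non-dict values collect into a list:

```python
from typing import Any, Dict


def merge_dicts(d1: Dict[str, Any], d2: Dict[str, Any]) -> Dict[str, Any]:
    """Merge two dicts recursively, collecting same-key values into a list."""
    merged: Dict[str, Any] = {}
    for key in set(d1).union(d2):
        if key in d1 and key in d2:
            if isinstance(d1[key], dict) and isinstance(d2[key], dict):
                merged[key] = merge_dicts(d1[key], d2[key])
            else:
                new_val = [d1[key]] if not isinstance(d1[key], list) else d1[key]
                new_val.append(d2[key])
                merged[key] = new_val
        else:
            merged[key] = d1.get(key, d2.get(key))
    return merged


print(merge_dicts({"a": 1, "n": {"x": 1}}, {"a": 2, "n": {"y": 2}}))
# {'a': [1, 2], 'n': {'x': 1, 'y': 2}}  (key order may vary)
```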
self._run_multi( {root_key: kwargs}, show_intermediates=show_intermediates ) return ( - self._get_single_result_output(result_outputs, return_values_direct), - intermediates, + self._get_single_result_output( + result_output_dicts, return_values_direct + ), + intermediate_dicts, ) @dispatcher.span @@ -810,7 +814,7 @@ async def _arun( raise ValueError("Length of batch inputs must be the same.") # List of individual inputs from batch input - kwargs = [ + kwargs_list = [ dict(zip(kwargs.keys(), values)) for values in zip(*kwargs.values()) ] @@ -818,7 +822,7 @@ async def _arun( self._arun_multi( {root_key: kwarg}, show_intermediates=show_intermediates ) - for kwarg in kwargs + for kwarg in kwargs_list ] results = await run_jobs(jobs, workers=len(jobs)) @@ -829,15 +833,17 @@ async def _arun( ) intermediates.append(result[1]) - return result_outputs, intermediates + return result_outputs, intermediates # type: ignore[return-value] else: - result_outputs, intermediates = await self._arun_multi( + result_output_dicts, intermediate_dicts = await self._arun_multi( {root_key: kwargs}, show_intermediates=show_intermediates ) return ( - self._get_single_result_output(result_outputs, return_values_direct), - intermediates, + self._get_single_result_output( + result_output_dicts, return_values_direct + ), + intermediate_dicts, ) def _validate_inputs(self, module_input_dict: Dict[str, Any]) -> None: @@ -855,7 +861,7 @@ def process_component_output( output_dict: Dict[str, Any], module_key: str, run_state: RunState, - ): + ) -> None: """Process component output.""" if module_key in self._get_leaf_keys(): run_state.result_outputs[module_key] = output_dict @@ -911,7 +917,7 @@ def get_run_state( @dispatcher.span def _run_multi( - self, module_input_dict: Dict[str, Any], show_intermediates=False + self, module_input_dict: Dict[str, Any], show_intermediates: bool = False ) -> Tuple[Dict[str, Any], Dict[str, ComponentIntermediates]]: """Run the pipeline for multiple roots.""" self._validate_inputs(module_input_dict) diff --git a/llama-index-core/llama_index/core/question_gen/llm_generators.py b/llama-index-core/llama_index/core/question_gen/llm_generators.py index 4c14520b1c061..4aa6b8e6527ce 100644 --- a/llama-index-core/llama_index/core/question_gen/llm_generators.py +++ b/llama-index-core/llama_index/core/question_gen/llm_generators.py @@ -60,7 +60,8 @@ def _update_prompts(self, prompts: PromptDictType) -> None: if output_parser is None: output_parser = SubQuestionOutputParser() self._prompt = PromptTemplate( - prompts["question_gen_prompt"].template, output_parser=output_parser + prompts["question_gen_prompt"].get_template(llm=self._llm), + output_parser=output_parser, ) def generate( diff --git a/llama-index-core/llama_index/core/readers/base.py b/llama-index-core/llama_index/core/readers/base.py index 667513b89e328..e2330582779e9 100644 --- a/llama-index-core/llama_index/core/readers/base.py +++ b/llama-index-core/llama_index/core/readers/base.py @@ -11,8 +11,7 @@ if TYPE_CHECKING: from llama_index.core.bridge.langchain import Document as LCDocument -from llama_index.core.bridge.pydantic import Field, GetJsonSchemaHandler, ConfigDict -from llama_index.core.bridge.pydantic_core import CoreSchema +from llama_index.core.bridge.pydantic import Field, ConfigDict from llama_index.core.schema import BaseComponent, Document @@ -45,24 +44,6 @@ def load_langchain_documents(self, **load_kwargs: Any) -> List["LCDocument"]: docs = self.load_data(**load_kwargs) return [d.to_langchain_format() for d in docs] - 
@classmethod - def __get_pydantic_json_schema__( - cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler - ) -> Dict[str, Any]: - json_schema = super().__get_pydantic_json_schema__(core_schema, handler) - json_schema = handler.resolve_ref_schema(json_schema) - json_schema.update({"title": cls.__name__}) - return json_schema - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema, handler - ): # Needed for pydantic v2 to work - json_schema = handler(core_schema) - json_schema = handler.resolve_ref_schema(json_schema) - json_schema["title"] = cls.__name__ - return json_schema - class BasePydanticReader(BaseReader, BaseComponent): """Serialiable Data Loader with Pydantic.""" diff --git a/llama-index-core/llama_index/core/readers/file/base.py b/llama-index-core/llama_index/core/readers/file/base.py index 6c3c8e3e33f46..d70030e08db84 100644 --- a/llama-index-core/llama_index/core/readers/file/base.py +++ b/llama-index-core/llama_index/core/readers/file/base.py @@ -13,7 +13,7 @@ from pathlib import Path, PurePosixPath import fsspec from fsspec.implementations.local import LocalFileSystem -from typing import Any, Callable, Dict, Generator, List, Optional, Type +from typing import Any, Callable, Dict, Generator, List, Optional, Type, Set from llama_index.core.readers.base import BaseReader, ResourcesReaderMixin from llama_index.core.async_utils import run_jobs, get_asyncio_module @@ -23,7 +23,7 @@ class FileSystemReaderMixin(ABC): @abstractmethod - def read_file_content(self, input_file: Path, **kwargs) -> bytes: + def read_file_content(self, input_file: Path, **kwargs: Any) -> bytes: """ Read the bytes content of a file. @@ -34,7 +34,7 @@ def read_file_content(self, input_file: Path, **kwargs) -> bytes: bytes: File content. """ - async def aread_file_content(self, input_file: Path, **kwargs) -> bytes: + async def aread_file_content(self, input_file: Path, **kwargs: Any) -> bytes: """ Read the bytes content of a file asynchronously. @@ -277,9 +277,9 @@ def is_hidden(self, path: Path) -> bool: def _add_files(self, input_dir: Path) -> List[Path]: """Add files.""" - all_files = set() - rejected_files = set() - rejected_dirs = set() + all_files: Set[Path] = set() + rejected_files: Set[Path] = set() + rejected_dirs: Set[Path] = set() # Default to POSIX paths for non-default file systems (e.g. S3) _Path = Path if is_default_fs(self.fs) else PurePosixPath @@ -303,10 +303,10 @@ def _add_files(self, input_dir: Path) -> List[Path]: else: file_refs = self.fs.glob(str(input_dir) + "/*") - for ref in file_refs: + for _ref in file_refs: # Manually check if file is hidden or directory instead of # in glob for backwards compatibility. 
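A brief aside on the hunk above: the reader picks PurePosixPath for non-default filesystems, since S3-style keys are POSIX paths rather than OS-native ones, globs one level, then filters directories and hidden entries by hand. A minimal self-contained sketch of the pattern; the helper name and the isinstance check are illustrative, not the library's actual code:

    from pathlib import Path, PurePath, PurePosixPath
    from typing import Set

    import fsspec
    from fsspec.implementations.local import LocalFileSystem


    def list_visible_files(fs: fsspec.AbstractFileSystem, input_dir: str) -> Set[PurePath]:
        """Glob one directory level and keep only non-hidden regular files."""
        # Non-local filesystems (e.g. S3) get pure POSIX paths, as in the diff.
        _Path = Path if isinstance(fs, LocalFileSystem) else PurePosixPath
        results: Set[PurePath] = set()
        for _ref in fs.glob(input_dir + "/*"):
            ref = _Path(_ref)  # rebind under a new name: one static type per variable
            if fs.isdir(str(ref)):
                continue  # directories are recursed into elsewhere
            if any(part.startswith(".") for part in ref.parts):
                continue  # hidden file or hidden parent directory
            results.add(ref)
        return results

The `for _ref in file_refs` rename above, and the `ref = _Path(_ref)` rebinding that follows, exist for the same reason as in this sketch: reusing a single loop variable for both the raw string and the constructed path gives it two types, which the new mypy gate rejects.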
- ref = _Path(ref) + ref = _Path(_ref) is_dir = self.fs.isdir(ref) skip_because_hidden = self.exclude_hidden and self.is_hidden(ref) skip_because_bad_ext = ( @@ -389,9 +389,9 @@ def _exclude_metadata(self, documents: List[Document]) -> List[Document]: return documents - def list_resources(self, *args: Any, **kwargs: Any) -> List[Path]: + def list_resources(self, *args: Any, **kwargs: Any) -> List[str]: """List files in the given filesystem.""" - return self.input_files + return [str(x) for x in self.input_files] def get_resource_info(self, resource_id: str, *args: Any, **kwargs: Any) -> Dict: info_result = self.fs.info(resource_id) @@ -465,7 +465,7 @@ async def aload_resource( **kwargs, ) - def read_file_content(self, input_file: Path, **kwargs) -> bytes: + def read_file_content(self, input_file: Path, **kwargs: Any) -> bytes: """Read file content.""" fs: fsspec.AbstractFileSystem = kwargs.get("fs", self.fs) with fs.open(input_file, errors=self.errors, encoding=self.encoding) as f: @@ -572,7 +572,17 @@ def load_file( return documents - async def aload_file(self, input_file: Path) -> List[Document]: + @staticmethod + async def aload_file( + input_file: Path, + file_metadata: Callable[[str], Dict], + file_extractor: Dict[str, BaseReader], + filename_as_id: bool = False, + encoding: str = "utf-8", + errors: str = "ignore", + raise_on_error: bool = False, + fs: Optional[fsspec.AbstractFileSystem] = None, + ) -> List[Document]: """Load file asynchronously.""" # TODO: make this less redundant default_file_reader_cls = SimpleDirectoryReader.supported_suffix_fn() @@ -580,33 +590,30 @@ async def aload_file(self, input_file: Path) -> List[Document]: metadata: Optional[dict] = None documents: List[Document] = [] - if self.file_metadata is not None: - metadata = self.file_metadata(str(input_file)) + if file_metadata is not None: + metadata = file_metadata(str(input_file)) file_suffix = input_file.suffix.lower() - if ( - file_suffix in default_file_reader_suffix - or file_suffix in self.file_extractor - ): + if file_suffix in default_file_reader_suffix or file_suffix in file_extractor: # use file readers - if file_suffix not in self.file_extractor: + if file_suffix not in file_extractor: # instantiate file reader if not already reader_cls = default_file_reader_cls[file_suffix] - self.file_extractor[file_suffix] = reader_cls() - reader = self.file_extractor[file_suffix] + file_extractor[file_suffix] = reader_cls() + reader = file_extractor[file_suffix] # load data -- catch all errors except for ImportError try: kwargs = {"extra_info": metadata} - if self.fs and not is_default_fs(self.fs): - kwargs["fs"] = self.fs + if fs and not is_default_fs(fs): + kwargs["fs"] = fs docs = await reader.aload_data(input_file, **kwargs) except ImportError as e: # ensure that ImportError is raised so user knows # about missing dependencies raise ImportError(str(e)) except Exception as e: - if self.raise_on_error: + if raise_on_error: raise # otherwise, just skip the file and report the error print( @@ -616,19 +623,19 @@ async def aload_file(self, input_file: Path) -> List[Document]: return [] # iterate over docs if needed - if self.filename_as_id: + if filename_as_id: for i, doc in enumerate(docs): doc.id_ = f"{input_file!s}_part_{i}" documents.extend(docs) else: # do standard read - fs = self.fs or get_default_fs() - with fs.open(input_file, errors=self.errors, encoding=self.encoding) as f: - data = f.read().decode(self.encoding, errors=self.errors) + fs = fs or get_default_fs() + with fs.open(input_file, errors=errors, 
encoding=encoding) as f: + data = f.read().decode(encoding, errors=errors) doc = Document(text=data, metadata=metadata or {}) - if self.filename_as_id: + if filename_as_id: doc.id_ = str(input_file) documents.append(doc) @@ -722,7 +729,20 @@ async def aload_data( files_to_process = self.input_files fs = fs or self.fs - coroutines = [self.aload_file(input_file) for input_file in files_to_process] + coroutines = [ + SimpleDirectoryReader.aload_file( + input_file, + self.file_metadata, + self.file_extractor, + self.filename_as_id, + self.encoding, + self.errors, + self.raise_on_error, + fs, + ) + for input_file in files_to_process + ] + if num_workers: document_lists = await run_jobs( coroutines, show_progress=show_progress, workers=num_workers diff --git a/llama-index-core/llama_index/core/response/notebook_utils.py b/llama-index-core/llama_index/core/response/notebook_utils.py index 18d2f7126e461..1f89120b43740 100644 --- a/llama-index-core/llama_index/core/response/notebook_utils.py +++ b/llama-index-core/llama_index/core/response/notebook_utils.py @@ -125,7 +125,7 @@ def display_query_and_multimodal_response( image = None if img_node.image_url: img_response = requests.get(img_node.image_url) - image = Image.open(BytesIO(img_response.content)) + image = Image.open(BytesIO(img_response.content)).convert("RGB") elif img_node.image_path: image = Image.open(img_node.image_path).convert("RGB") else: diff --git a/llama-index-core/llama_index/core/response/utils.py b/llama-index-core/llama_index/core/response/utils.py index 1b8122437957e..9c768515fb6c7 100644 --- a/llama-index-core/llama_index/core/response/utils.py +++ b/llama-index-core/llama_index/core/response/utils.py @@ -1,6 +1,6 @@ """Utilities for response.""" -from typing import Generator +from typing import AsyncGenerator, Generator def get_response_text(response_gen: Generator) -> str: @@ -11,7 +11,7 @@ def get_response_text(response_gen: Generator) -> str: return response_text -async def aget_response_text(response_gen: Generator) -> str: +async def aget_response_text(response_gen: AsyncGenerator) -> str: """Get response text.""" response_text = "" async for response in response_gen: diff --git a/llama-index-core/llama_index/core/response_synthesizers/accumulate.py b/llama-index-core/llama_index/core/response_synthesizers/accumulate.py index dcc20d6a5b62f..6fbdea50453cd 100644 --- a/llama-index-core/llama_index/core/response_synthesizers/accumulate.py +++ b/llama-index-core/llama_index/core/response_synthesizers/accumulate.py @@ -2,6 +2,7 @@ from typing import Any, Callable, List, Optional, Sequence from llama_index.core.async_utils import run_async_tasks +from llama_index.core.bridge.pydantic import BaseModel from llama_index.core.callbacks.base import CallbackManager from llama_index.core.indices.prompt_helper import PromptHelper from llama_index.core.llms import LLM @@ -23,7 +24,7 @@ def __init__( callback_manager: Optional[CallbackManager] = None, prompt_helper: Optional[PromptHelper] = None, text_qa_template: Optional[BasePromptTemplate] = None, - output_cls: Optional[Any] = None, + output_cls: Optional[BaseModel] = None, streaming: bool = False, use_async: bool = False, ) -> None: @@ -35,7 +36,7 @@ def __init__( ) self._text_qa_template = text_qa_template or DEFAULT_TEXT_QA_PROMPT_SEL self._use_async = use_async - self._output_cls = output_cls + self._output_cls = output_cls # type: ignore def _get_prompts(self) -> PromptDictType: """Get prompts.""" @@ -132,9 +133,9 @@ def _give_responses( ] else: predictor = ( - 
self._llm.astructured_predict + self._llm.astructured_predict # type: ignore if use_async - else self._llm.structured_predict + else self._llm.structured_predict # type: ignore ) return [ diff --git a/llama-index-core/llama_index/core/response_synthesizers/base.py b/llama-index-core/llama_index/core/response_synthesizers/base.py index 38d3fd656240d..f992ebac4d57b 100644 --- a/llama-index-core/llama_index/core/response_synthesizers/base.py +++ b/llama-index-core/llama_index/core/response_synthesizers/base.py @@ -160,7 +160,7 @@ def _prepare_response_output( if isinstance(self._llm, StructuredLLM): # convert string to output_cls - output = self._llm.output_cls.model_validate_json(response_str) + output = self._llm.output_cls.model_validate_json(str(response_str)) return PydanticResponse( output, source_nodes=source_nodes, @@ -186,7 +186,7 @@ def _prepare_response_output( metadata=response_metadata, ) - if isinstance(response_str, self._output_cls): + if isinstance(response_str, self._output_cls): # type: ignore return PydanticResponse( response_str, source_nodes=source_nodes, metadata=response_metadata ) @@ -211,16 +211,16 @@ def synthesize( if len(nodes) == 0: if self._streaming: - empty_response = StreamingResponse( + empty_response_stream = StreamingResponse( response_gen=empty_response_generator() ) dispatcher.event( SynthesizeEndEvent( query=query, - response=empty_response, + response=empty_response_stream, ) ) - return empty_response + return empty_response_stream else: empty_response = Response("Empty Response") dispatcher.event( @@ -276,16 +276,16 @@ async def asynthesize( ) if len(nodes) == 0: if self._streaming: - empty_response = AsyncStreamingResponse( + empty_response_stream = AsyncStreamingResponse( response_gen=empty_response_agenerator() ) dispatcher.event( SynthesizeEndEvent( query=query, - response=empty_response, + response=empty_response_stream, ) ) - return empty_response + return empty_response_stream else: empty_response = Response("Empty Response") dispatcher.event( diff --git a/llama-index-core/llama_index/core/response_synthesizers/factory.py b/llama-index-core/llama_index/core/response_synthesizers/factory.py index 2240c119cf2e3..68fe8bd5feb65 100644 --- a/llama-index-core/llama_index/core/response_synthesizers/factory.py +++ b/llama-index-core/llama_index/core/response_synthesizers/factory.py @@ -11,7 +11,6 @@ ) from llama_index.core.prompts.default_prompts import DEFAULT_SIMPLE_INPUT_PROMPT from llama_index.core.llms import LLM -from llama_index.core.prompts.prompts import PromptTemplate from llama_index.core.response_synthesizers.accumulate import Accumulate from llama_index.core.response_synthesizers.base import BaseSynthesizer from llama_index.core.response_synthesizers.compact_and_accumulate import ( @@ -44,7 +43,9 @@ def get_response_synthesizer( streaming: bool = False, structured_answer_filtering: bool = False, output_cls: Optional[BaseModel] = None, - program_factory: Optional[Callable[[PromptTemplate], BasePydanticProgram]] = None, + program_factory: Optional[ + Callable[[BasePromptTemplate], BasePydanticProgram] + ] = None, verbose: bool = False, ) -> BaseSynthesizer: """Get a response synthesizer.""" diff --git a/llama-index-core/llama_index/core/response_synthesizers/refine.py b/llama-index-core/llama_index/core/response_synthesizers/refine.py index cff9e9571c88d..1f17178a707b0 100644 --- a/llama-index-core/llama_index/core/response_synthesizers/refine.py +++ b/llama-index-core/llama_index/core/response_synthesizers/refine.py @@ -15,7 +15,7 @@ from 
llama_index.core.indices.prompt_helper import PromptHelper from llama_index.core.indices.utils import truncate_text from llama_index.core.llms import LLM -from llama_index.core.prompts.base import BasePromptTemplate, PromptTemplate +from llama_index.core.prompts.base import BasePromptTemplate from llama_index.core.prompts.default_prompt_selectors import ( DEFAULT_REFINE_PROMPT_SEL, DEFAULT_TEXT_QA_PROMPT_SEL, @@ -58,7 +58,12 @@ class DefaultRefineProgram(BasePydanticProgram): query_satisfied=True. In effect, doesn't do any answer filtering. """ - def __init__(self, prompt: BasePromptTemplate, llm: LLM, output_cls: BaseModel): + def __init__( + self, + prompt: BasePromptTemplate, + llm: LLM, + output_cls: Optional[BaseModel] = None, + ): self._prompt = prompt self._llm = llm self._output_cls = output_cls @@ -69,7 +74,7 @@ def output_cls(self) -> Type[BaseModel]: def __call__(self, *args: Any, **kwds: Any) -> StructuredRefineResponse: if self._output_cls is not None: - answer = self._llm.structured_predict( + answer = self._llm.structured_predict( # type: ignore self._output_cls, self._prompt, **kwds, @@ -84,7 +89,7 @@ def __call__(self, *args: Any, **kwds: Any) -> StructuredRefineResponse: async def acall(self, *args: Any, **kwds: Any) -> StructuredRefineResponse: if self._output_cls is not None: - answer = await self._llm.astructured_predict( + answer = await self._llm.astructured_predict( # type: ignore self._output_cls, self._prompt, **kwds, @@ -126,7 +131,7 @@ def __init__( self._refine_template = refine_template or DEFAULT_REFINE_PROMPT_SEL self._verbose = verbose self._structured_answer_filtering = structured_answer_filtering - self._output_cls = output_cls + self._output_cls = output_cls # type: ignore if self._streaming and self._structured_answer_filtering: raise ValueError( @@ -188,20 +193,22 @@ def get_response( dispatcher.event(GetResponseEndEvent()) return response - def _default_program_factory(self, prompt: PromptTemplate) -> BasePydanticProgram: + def _default_program_factory( + self, prompt: BasePromptTemplate + ) -> BasePydanticProgram: if self._structured_answer_filtering: from llama_index.core.program.utils import get_program_for_llm return get_program_for_llm( - StructuredRefineResponse, + StructuredRefineResponse, # type: ignore prompt, - self._llm, + self._llm, # type: ignore verbose=self._verbose, ) else: return DefaultRefineProgram( prompt=prompt, - llm=self._llm, + llm=self._llm, # type: ignore output_cls=self._output_cls, ) diff --git a/llama-index-core/llama_index/core/response_synthesizers/tree_summarize.py b/llama-index-core/llama_index/core/response_synthesizers/tree_summarize.py index 3dfbde1022283..003a006f1bd94 100644 --- a/llama-index-core/llama_index/core/response_synthesizers/tree_summarize.py +++ b/llama-index-core/llama_index/core/response_synthesizers/tree_summarize.py @@ -89,7 +89,7 @@ async def aget_response( **response_kwargs, ) else: - response = await self._llm.astructured_predict( + response = await self._llm.astructured_predict( # type: ignore self._output_cls, summary_template, context_str=text_chunks[0], @@ -112,7 +112,7 @@ async def aget_response( ] else: tasks = [ - self._llm.astructured_predict( + self._llm.astructured_predict( # type: ignore self._output_cls, summary_template, context_str=text_chunk, @@ -165,7 +165,7 @@ def get_response( **response_kwargs, ) else: - response = self._llm.structured_predict( + response = self._llm.structured_predict( # type: ignore self._output_cls, summary_template, context_str=text_chunks[0], @@ -188,7 
+188,7 @@ def get_response( ] else: tasks = [ - self._llm.astructured_predict( + self._llm.astructured_predict( # type: ignore self._output_cls, summary_template, context_str=text_chunk, @@ -217,7 +217,7 @@ def get_response( ] else: summaries = [ - self._llm.structured_predict( + self._llm.structured_predict( # type: ignore self._output_cls, summary_template, context_str=text_chunk, diff --git a/llama-index-core/llama_index/core/retrievers/auto_merging_retriever.py b/llama-index-core/llama_index/core/retrievers/auto_merging_retriever.py index 30e2971c21397..e04c5ac524c7f 100644 --- a/llama-index-core/llama_index/core/retrievers/auto_merging_retriever.py +++ b/llama-index-core/llama_index/core/retrievers/auto_merging_retriever.py @@ -11,7 +11,13 @@ from llama_index.core.indices.vector_store.retrievers.retriever import ( VectorIndexRetriever, ) -from llama_index.core.schema import BaseNode, IndexNode, NodeWithScore, QueryBundle +from llama_index.core.schema import ( + BaseNode, + IndexNode, + NodeWithScore, + MetadataMode, + QueryBundle, +) from llama_index.core.storage.storage_context import StorageContext logger = logging.getLogger(__name__) @@ -72,7 +78,7 @@ def _get_parents_and_merge( # compute ratios and "merge" nodes # merging: delete some children nodes, add some parent nodes node_ids_to_delete = set() - nodes_to_add: Dict[str, BaseNode] = {} + nodes_to_add: Dict[str, NodeWithScore] = {} for parent_node_id, parent_node in parent_nodes.items(): parent_child_nodes = parent_node.child_nodes parent_num_children = len(parent_child_nodes) if parent_child_nodes else 1 @@ -85,7 +91,9 @@ def _get_parents_and_merge( set({n.node.node_id for n in parent_cur_children}) ) - parent_node_text = truncate_text(parent_node.text, 100) + parent_node_text = truncate_text( + parent_node.get_content(metadata_mode=MetadataMode.NONE), 100 + ) info_str = ( f"> Merging {len(parent_cur_children)} nodes into parent node.\n" f"> Parent node id: {parent_node_id}.\n" @@ -138,7 +146,9 @@ def _fill_in_nodes( ) next_node = cast(BaseNode, next_node) - next_node_text = truncate_text(next_node.get_text(), 100) + next_node_text = truncate_text( + next_node.get_content(metadata_mode=MetadataMode.NONE), 100 + ) info_str = ( f"> Filling in node. 
Node id: {cur_node.next_node.node_id}" f"> Node text: {next_node_text}\n" diff --git a/llama-index-core/llama_index/core/retrievers/fusion_retriever.py b/llama-index-core/llama_index/core/retrievers/fusion_retriever.py index e64dee7bbbf08..ec349f1b01bae 100644 --- a/llama-index-core/llama_index/core/retrievers/fusion_retriever.py +++ b/llama-index-core/llama_index/core/retrievers/fusion_retriever.py @@ -147,7 +147,9 @@ def _relative_score_fusion( if not nodes_with_scores: min_max_scores[query_tuple] = (0.0, 0.0) continue - scores = [node_with_score.score for node_with_score in nodes_with_scores] + scores = [ + node_with_score.score or 0.0 for node_with_score in nodes_with_scores + ] if dist_based: # Set min and max based on mean and std dev mean_score = sum(scores) / len(scores) @@ -173,7 +175,10 @@ def _relative_score_fusion( ) # Scale by the weight of the retriever retriever_idx = query_tuple[1] - node_with_score.score *= self._retriever_weights[retriever_idx] + existing_score = node_with_score.score or 0.0 + node_with_score.score = ( + existing_score * self._retriever_weights[retriever_idx] + ) # Divide by the number of queries node_with_score.score /= self.num_queries @@ -185,7 +190,8 @@ def _relative_score_fusion( for node_with_score in nodes_with_scores: hash = node_with_score.node.hash if hash in all_nodes: - all_nodes[hash].score += node_with_score.score + cur_score = all_nodes[hash].score or 0.0 + all_nodes[hash].score = cur_score + (node_with_score.score or 0.0) else: all_nodes[hash] = node_with_score @@ -201,7 +207,9 @@ def _simple_fusion( for node_with_score in nodes_with_scores: hash = node_with_score.node.hash if hash in all_nodes: - max_score = max(node_with_score.score, all_nodes[hash].score) + max_score = max( + node_with_score.score or 0.0, all_nodes[hash].score or 0.0 + ) all_nodes[hash].score = max_score else: all_nodes[hash] = node_with_score diff --git a/llama-index-core/llama_index/core/schema.py b/llama-index-core/llama_index/core/schema.py index dd3984b28bb56..c7a3caa19306e 100644 --- a/llama-index-core/llama_index/core/schema.py +++ b/llama-index-core/llama_index/core/schema.py @@ -10,7 +10,7 @@ from enum import Enum, auto from hashlib import sha256 from io import BytesIO -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union from dataclasses_json import DataClassJsonMixin from llama_index.core.bridge.pydantic import ( @@ -52,13 +52,13 @@ class BaseComponent(BaseModel): def __get_pydantic_json_schema__( cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler ) -> JsonSchemaValue: - json_schema = handler(core_schema) - json_schema = handler.resolve_ref_schema(json_schema) - json_schema["properties"]["class_name"] = { - "title": "Class Name", - "type": "string", - "default": cls.class_name(), - } + json_schema = super().__get_pydantic_json_schema__(core_schema, handler) + if "properties" in json_schema: + json_schema["properties"]["class_name"] = { + "title": "Class Name", + "type": "string", + "default": cls.class_name(), + } return json_schema @classmethod @@ -136,9 +136,11 @@ def to_json(self, **kwargs: Any) -> str: # TODO: return type here not supported by current mypy version @classmethod def from_dict(cls, data: Dict[str, Any], **kwargs: Any) -> Self: # type: ignore + # In SimpleKVStore we rely on shallow copying. Hence, the data will be modified in the store directly. + # And it is the same when the user is passing a dictionary to create a component.
We can't modify the passed down dictionary. + data = dict(data) if isinstance(kwargs, dict): data.update(kwargs) - data.pop("class_name", None) return cls(**data) @@ -154,10 +156,14 @@ class TransformComponent(BaseComponent, DispatcherSpanMixin): model_config = ConfigDict(arbitrary_types_allowed=True) @abstractmethod - def __call__(self, nodes: List["BaseNode"], **kwargs: Any) -> List["BaseNode"]: + def __call__( + self, nodes: Sequence["BaseNode"], **kwargs: Any + ) -> Sequence["BaseNode"]: """Transform nodes.""" - async def acall(self, nodes: List["BaseNode"], **kwargs: Any) -> List["BaseNode"]: + async def acall( + self, nodes: Sequence["BaseNode"], **kwargs: Any + ) -> Sequence["BaseNode"]: """Async transform nodes.""" return self.__call__(nodes, **kwargs) @@ -581,7 +587,7 @@ def from_dict(cls, data: Dict[str, Any], **kwargs: Any) -> Self: # type: ignore # check if its a node, else assume stringable try: - parsed_obj = json_to_doc(obj) + parsed_obj = json_to_doc(obj) # type: ignore[assignment] except Exception: parsed_obj = TextNode(text=str(obj)) @@ -709,11 +715,13 @@ def to_langchain_format(self) -> "LCDocument": from llama_index.core.bridge.langchain import Document as LCDocument metadata = self.metadata or {} - return LCDocument(page_content=self.text, metadata=metadata) + return LCDocument(page_content=self.text, metadata=metadata, id=self.id_) @classmethod def from_langchain_format(cls, doc: "LCDocument") -> "Document": """Convert struct from LangChain document format.""" + if doc.id: + return cls(text=doc.page_content, metadata=doc.metadata, id_=doc.id) return cls(text=doc.page_content, metadata=doc.metadata) def to_haystack_format(self) -> "HaystackDocument": diff --git a/llama-index-core/llama_index/core/selectors/__init__.py b/llama-index-core/llama_index/core/selectors/__init__.py index c8ca9281e7eda..729371d9b7e96 100644 --- a/llama-index-core/llama_index/core/selectors/__init__.py +++ b/llama-index-core/llama_index/core/selectors/__init__.py @@ -1,3 +1,9 @@ +from llama_index.core.base.base_selector import ( + BaseSelector, + MultiSelection, + SingleSelection, + SelectorResult, +) from llama_index.core.selectors.embedding_selectors import EmbeddingSingleSelector from llama_index.core.selectors.llm_selectors import ( LLMMultiSelector, @@ -9,6 +15,12 @@ ) __all__ = [ + # Bases + Types + "BaseSelector", + "MultiSelection", + "SelectorResult", + "SingleSelection", + # Classes "LLMSingleSelector", "LLMMultiSelector", "EmbeddingSingleSelector", diff --git a/llama-index-core/llama_index/core/selectors/llm_selectors.py b/llama-index-core/llama_index/core/selectors/llm_selectors.py index b5b57cd776d80..8388623227dac 100644 --- a/llama-index-core/llama_index/core/selectors/llm_selectors.py +++ b/llama-index-core/llama_index/core/selectors/llm_selectors.py @@ -8,6 +8,7 @@ from llama_index.core.llms import LLM from llama_index.core.output_parsers.base import StructuredOutput from llama_index.core.output_parsers.selection import Answer, SelectionOutputParser +from llama_index.core.prompts.base import BasePromptTemplate from llama_index.core.prompts.mixin import PromptDictType from llama_index.core.prompts.prompt_type import PromptType from llama_index.core.schema import QueryBundle @@ -61,7 +62,7 @@ def __init__( prompt: SingleSelectPrompt, ) -> None: self._llm = llm - self._prompt = prompt + self._prompt: BasePromptTemplate = prompt if self._prompt.output_parser is None: raise ValueError("Prompt should have output parser.") @@ -152,7 +153,7 @@ def __init__( max_outputs: Optional[int] = 
None, ) -> None: self._llm = llm - self._prompt = prompt + self._prompt: BasePromptTemplate = prompt self._max_outputs = max_outputs if self._prompt.output_parser is None: diff --git a/llama-index-core/llama_index/core/selectors/types.py b/llama-index-core/llama_index/core/selectors/types.py deleted file mode 100644 index f752fb7cff91d..0000000000000 --- a/llama-index-core/llama_index/core/selectors/types.py +++ /dev/null @@ -1,115 +0,0 @@ -from abc import abstractmethod -from typing import Any, List, Sequence, Union - -from llama_index.core.base.query_pipeline.query import ( - ChainableMixin, - QueryComponent, -) -from llama_index.core.bridge.pydantic import BaseModel -from llama_index.core.instrumentation import DispatcherSpanMixin -from llama_index.core.prompts.mixin import PromptMixin, PromptMixinType -from llama_index.core.schema import QueryBundle, QueryType -from llama_index.core.tools.types import ToolMetadata - -MetadataType = Union[str, ToolMetadata] - - -class SingleSelection(BaseModel): - """A single selection of a choice.""" - - index: int - reason: str - - -class MultiSelection(BaseModel): - """A multi-selection of choices.""" - - selections: List[SingleSelection] - - @property - def ind(self) -> int: - if len(self.selections) != 1: - raise ValueError( - f"There are {len(self.selections)} selections, " "please use .inds." - ) - return self.selections[0].index - - @property - def reason(self) -> str: - if len(self.reasons) != 1: - raise ValueError( - f"There are {len(self.reasons)} selections, " "please use .reasons." - ) - return self.selections[0].reason - - @property - def inds(self) -> List[int]: - return [x.index for x in self.selections] - - @property - def reasons(self) -> List[str]: - return [x.reason for x in self.selections] - - -# separate name for clarity and to not confuse function calling model -SelectorResult = MultiSelection - - -def _wrap_choice(choice: MetadataType) -> ToolMetadata: - if isinstance(choice, ToolMetadata): - return choice - elif isinstance(choice, str): - return ToolMetadata(description=choice) - else: - raise ValueError(f"Unexpected type: {type(choice)}") - - -def _wrap_query(query: QueryType) -> QueryBundle: - if isinstance(query, QueryBundle): - return query - elif isinstance(query, str): - return QueryBundle(query_str=query) - else: - raise ValueError(f"Unexpected type: {type(query)}") - - -class BaseSelector(PromptMixin, ChainableMixin, DispatcherSpanMixin): - """Base selector.""" - - def _get_prompt_modules(self) -> PromptMixinType: - """Get prompt sub-modules.""" - return {} - - def select( - self, choices: Sequence[MetadataType], query: QueryType - ) -> SelectorResult: - metadatas = [_wrap_choice(choice) for choice in choices] - query_bundle = _wrap_query(query) - return self._select(choices=metadatas, query=query_bundle) - - async def aselect( - self, choices: Sequence[MetadataType], query: QueryType - ) -> SelectorResult: - metadatas = [_wrap_choice(choice) for choice in choices] - query_bundle = _wrap_query(query) - return await self._aselect(choices=metadatas, query=query_bundle) - - @abstractmethod - def _select( - self, choices: Sequence[ToolMetadata], query: QueryBundle - ) -> SelectorResult: - pass - - @abstractmethod - async def _aselect( - self, choices: Sequence[ToolMetadata], query: QueryBundle - ) -> SelectorResult: - pass - - def _as_query_component(self, **kwargs: Any) -> QueryComponent: - """As query component.""" - from llama_index.core.query_pipeline.components.router import ( - SelectorComponent, - ) - - return 
SelectorComponent(selector=self) diff --git a/llama-index-core/llama_index/core/storage/chat_store/simple_chat_store.py b/llama-index-core/llama_index/core/storage/chat_store/simple_chat_store.py index fe3aee03908c3..e7d0f8b192fb5 100644 --- a/llama-index-core/llama_index/core/storage/chat_store/simple_chat_store.py +++ b/llama-index-core/llama_index/core/storage/chat_store/simple_chat_store.py @@ -9,7 +9,9 @@ from llama_index.core.storage.chat_store.base import BaseChatStore -def chat_message_serialization(chat_message: Any, handler, info) -> Dict[str, Any]: +def chat_message_serialization( + chat_message: Any, handler: Any, info: Any +) -> Dict[str, Any]: partial_result = handler(chat_message, info) for key, value in partial_result.get("additional_kwargs", {}).items(): @@ -89,7 +91,7 @@ def persist( fs.makedirs(dirpath) with fs.open(persist_path, "w") as f: - f.write(json.dumps(self.json())) + f.write(self.json()) @classmethod def from_persist_path( @@ -103,4 +105,8 @@ def from_persist_path( return cls() with fs.open(persist_path, "r") as f: data = json.load(f) - return cls.model_validate_json(data) + + if isinstance(data, str): + return cls.model_validate_json(data) + else: + return cls.model_validate(data) diff --git a/llama-index-core/llama_index/core/storage/docstore/keyval_docstore.py b/llama-index-core/llama_index/core/storage/docstore/keyval_docstore.py index 9713d5bccc0db..e9d9a8c3e2dd5 100644 --- a/llama-index-core/llama_index/core/storage/docstore/keyval_docstore.py +++ b/llama-index-core/llama_index/core/storage/docstore/keyval_docstore.py @@ -1,7 +1,7 @@ """Document store.""" import asyncio -from typing import Dict, List, Optional, Sequence, Tuple +from typing import Any, Dict, List, Optional, Sequence, Tuple from llama_index.core.schema import BaseNode, TextNode from llama_index.core.storage.docstore.types import BaseDocumentStore, RefDocInfo @@ -123,12 +123,11 @@ def _get_kv_pairs_for_insert( return node_kv_pair, metadata_kv_pair, ref_doc_kv_pair def _merge_ref_doc_kv_pairs(self, ref_doc_kv_pairs: dict) -> List[Tuple[str, dict]]: - merged_ref_doc_kv_pairs = [] + merged_ref_doc_kv_pairs: List[Tuple[str, dict]] = [] for key, kv_pairs in ref_doc_kv_pairs.items(): - merged_ref_doc_kv_pairs = [] for key, kv_pairs in ref_doc_kv_pairs.items(): - merged_node_ids = [] - metadata = {} + merged_node_ids: List[str] = [] + metadata: Dict[str, Any] = {} for kv_pair in kv_pairs: nodes = kv_pair[1].get("node_ids", []) new_nodes = set(nodes).difference(set(merged_node_ids)) diff --git a/llama-index-core/llama_index/core/storage/docstore/postgres_docstore.py b/llama-index-core/llama_index/core/storage/docstore/postgres_docstore.py deleted file mode 100644 index f29a87faf9d71..0000000000000 --- a/llama-index-core/llama_index/core/storage/docstore/postgres_docstore.py +++ /dev/null @@ -1,79 +0,0 @@ -from typing import Optional - -from llama_index.core.storage.docstore.keyval_docstore import KVDocumentStore -from llama_index.core.storage.docstore.types import DEFAULT_BATCH_SIZE -from llama_index.core.storage.kvstore.postgres_kvstore import PostgresKVStore - - -class PostgresDocumentStore(KVDocumentStore): - """Postgres Document (Node) store. - - A Postgres store for Document and Node objects. 
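Pausing before this deleted Postgres docstore: the SimpleChatStore change earlier in this hunk fixes a double-serialization bug, since persist() used to write json.dumps(self.json()), a JSON string encoded inside JSON, and from_persist_path therefore now accepts both shapes. A toy sketch of that tolerant loader with a stand-in model; only the load logic mirrors the diff:

    import json
    from typing import Dict, List

    from pydantic import BaseModel


    class TinyChatStore(BaseModel):
        """Toy stand-in for SimpleChatStore; only persistence matters here."""

        store: Dict[str, List[str]] = {}

        @classmethod
        def from_persist_path(cls, persist_path: str) -> "TinyChatStore":
            with open(persist_path) as f:
                data = json.load(f)
            if isinstance(data, str):
                # Legacy double-encoded file: the top-level JSON value is
                # itself a JSON document, so parse it a second time.
                return cls.model_validate_json(data)
            return cls.model_validate(data)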
- - Args: - postgres_kvstore (PostgresKVStore): Postgres key-value store - namespace (str): namespace for the docstore - batch_size (int): batch size for bulk operations - - """ - - def __init__( - self, - postgres_kvstore: PostgresKVStore, - namespace: Optional[str] = None, - batch_size: int = DEFAULT_BATCH_SIZE, - ) -> None: - """Init a PostgresDocumentStore.""" - super().__init__(postgres_kvstore, namespace=namespace, batch_size=batch_size) - - @classmethod - def from_uri( - cls, - uri: str, - namespace: Optional[str] = None, - table_name: str = "docstore", - schema_name: str = "public", - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> "PostgresDocumentStore": - """Load a PostgresDocumentStore from a Postgres URI.""" - postgres_kvstore = PostgresKVStore.from_uri( - uri=uri, - table_name=table_name, - schema_name=schema_name, - perform_setup=perform_setup, - debug=debug, - use_jsonb=use_jsonb, - ) - return cls(postgres_kvstore, namespace) - - @classmethod - def from_params( - cls, - host: Optional[str] = None, - port: Optional[str] = None, - database: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - namespace: Optional[str] = None, - table_name: str = "docstore", - schema_name: str = "public", - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> "PostgresDocumentStore": - """Load a PostgresDocumentStore from a Postgres host and port.""" - postgres_kvstore = PostgresKVStore.from_params( - host=host, - port=port, - database=database, - user=user, - password=password, - table_name=table_name, - schema_name=schema_name, - perform_setup=perform_setup, - debug=debug, - use_jsonb=use_jsonb, - ) - return cls(postgres_kvstore, namespace) diff --git a/llama-index-core/llama_index/core/storage/docstore/utils.py b/llama-index-core/llama_index/core/storage/docstore/utils.py index 212e5c6908d3b..8e8cbf8731acf 100644 --- a/llama-index-core/llama_index/core/storage/docstore/utils.py +++ b/llama-index-core/llama_index/core/storage/docstore/utils.py @@ -13,7 +13,7 @@ def doc_to_json(doc: BaseNode) -> dict: return { - DATA_KEY: doc.dict(), + DATA_KEY: doc.to_dict(), TYPE_KEY: doc.get_type(), } @@ -27,9 +27,10 @@ def json_to_doc(doc_dict: dict) -> BaseNode: return legacy_json_to_doc(doc_dict) else: if doc_type == Document.get_type(): - doc = Document.from_dict(data_dict) - elif doc_type == ImageDocument.get_type(): - doc = ImageDocument.from_dict(data_dict) + if data_dict["class_name"] == ImageDocument.class_name(): + doc = ImageDocument.from_dict(data_dict) + else: + doc = Document.from_dict(data_dict) elif doc_type == TextNode.get_type(): doc = TextNode.from_dict(data_dict) elif doc_type == ImageNode.get_type(): diff --git a/llama-index-core/llama_index/core/storage/index_store/postgres_index_store.py b/llama-index-core/llama_index/core/storage/index_store/postgres_index_store.py deleted file mode 100644 index 4c4311f8fcc05..0000000000000 --- a/llama-index-core/llama_index/core/storage/index_store/postgres_index_store.py +++ /dev/null @@ -1,74 +0,0 @@ -from typing import Optional - -from llama_index.core.storage.index_store.keyval_index_store import KVIndexStore -from llama_index.core.storage.kvstore.postgres_kvstore import PostgresKVStore - - -class PostgresIndexStore(KVIndexStore): - """Postgres Index store. 
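The docstore/utils.py change earlier in this hunk merits a gloss: ImageDocument apparently shares its get_type() tag with Document now, so json_to_doc falls back to the class_name stored inside the payload to pick the subclass. A self-contained toy of the same two-level dispatch; the "__type__" and "__data__" keys here are illustrative stand-ins for the module's TYPE_KEY and DATA_KEY constants:

    from typing import Any, Dict, Type


    class Doc:
        @classmethod
        def from_dict(cls, data: Dict[str, Any]) -> "Doc":
            return cls()


    class ImageDoc(Doc):
        pass


    def json_to_doc(doc_dict: Dict[str, Any]) -> Doc:
        data = doc_dict["__data__"]
        if doc_dict["__type__"] == "Document":
            # Both classes serialize under the "Document" tag, so the stored
            # class_name is the discriminator, as in the diff above.
            cls: Type[Doc] = ImageDoc if data["class_name"] == "ImageDoc" else Doc
            return cls.from_dict(data)
        raise ValueError(f"unknown type: {doc_dict['__type__']}")


    assert isinstance(
        json_to_doc({"__type__": "Document", "__data__": {"class_name": "ImageDoc"}}),
        ImageDoc,
    )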
- - Args: - postgres_kvstore (PostgresKVStore): Postgres key-value store - namespace (str): namespace for the index store - - """ - - def __init__( - self, - postgres_kvstore: PostgresKVStore, - namespace: Optional[str] = None, - ) -> None: - """Init a PostgresIndexStore.""" - super().__init__(postgres_kvstore, namespace=namespace) - - @classmethod - def from_uri( - cls, - uri: str, - namespace: Optional[str] = None, - table_name: str = "indexstore", - schema_name: str = "public", - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> "PostgresIndexStore": - """Load a PostgresIndexStore from a PostgresURI.""" - postgres_kvstore = PostgresKVStore.from_uri( - uri=uri, - table_name=table_name, - schema_name=schema_name, - perform_setup=perform_setup, - debug=debug, - use_jsonb=use_jsonb, - ) - return cls(postgres_kvstore, namespace) - - @classmethod - def from_params( - cls, - host: Optional[str] = None, - port: Optional[str] = None, - database: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - namespace: Optional[str] = None, - table_name: str = "indexstore", - schema_name: str = "public", - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> "PostgresIndexStore": - """Load a PostgresIndexStore from a Postgres host and port.""" - postgres_kvstore = PostgresKVStore.from_params( - host=host, - port=port, - database=database, - user=user, - password=password, - table_name=table_name, - schema_name=schema_name, - perform_setup=perform_setup, - debug=debug, - use_jsonb=use_jsonb, - ) - return cls(postgres_kvstore, namespace) diff --git a/llama-index-core/llama_index/core/storage/kvstore/postgres_kvstore.py b/llama-index-core/llama_index/core/storage/kvstore/postgres_kvstore.py deleted file mode 100644 index 966e26df42073..0000000000000 --- a/llama-index-core/llama_index/core/storage/kvstore/postgres_kvstore.py +++ /dev/null @@ -1,462 +0,0 @@ -import json -from typing import Any, Dict, List, Optional, Tuple, Type -from urllib.parse import urlparse - -from llama_index.core.storage.kvstore.types import ( - DEFAULT_BATCH_SIZE, - DEFAULT_COLLECTION, - BaseKVStore, -) - -IMPORT_ERROR_MSG = "`asyncpg` package not found, please run `pip install asyncpg`" - - -def get_data_model( - base: Type, - index_name: str, - schema_name: str, - use_jsonb: bool = False, -) -> Any: - """ - This part create a dynamic sqlalchemy model with a new table. - """ - from sqlalchemy import Column, Index, Integer, UniqueConstraint - from sqlalchemy.dialects.postgresql import JSON, JSONB, VARCHAR - - tablename = "data_%s" % index_name # dynamic table name - class_name = "Data%s" % index_name # dynamic class name - - metadata_dtype = JSONB if use_jsonb else JSON - - class AbstractData(base): # type: ignore - __abstract__ = True # this line is necessary - id = Column(Integer, primary_key=True, autoincrement=True) - key = Column(VARCHAR, nullable=False) - namespace = Column(VARCHAR, nullable=False) - value = Column(metadata_dtype) - - return type( - class_name, - (AbstractData,), - { - "__tablename__": tablename, - "__table_args__": ( - UniqueConstraint( - "key", "namespace", name=f"{tablename}:unique_key_namespace" - ), - Index(f"{tablename}:idx_key_namespace", "key", "namespace"), - {"schema": schema_name}, - ), - }, - ) - - -class PostgresKVStore(BaseKVStore): - """Postgres Key-Value store. 
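One technique in the module being deleted here travels well: get_data_model builds a SQLAlchemy declarative class at runtime with type(), so each store can own a distinctly named table. A trimmed, runnable sketch of the same idea; the JSON/JSONB switch and the secondary index are dropped for brevity:

    from typing import Any, Type

    from sqlalchemy import Column, Integer, String, UniqueConstraint
    from sqlalchemy.orm import declarative_base


    def make_kv_model(base: Type, suffix: str) -> Any:
        """Create a declarative model whose table name is chosen at runtime."""

        class AbstractKV(base):  # type: ignore
            __abstract__ = True  # mapped only through the concrete subclass
            id = Column(Integer, primary_key=True, autoincrement=True)
            key = Column(String, nullable=False)
            namespace = Column(String, nullable=False)
            value = Column(String)

        tablename = f"data_{suffix}"
        return type(
            f"Data{suffix}",  # dynamic class name, as in get_data_model above
            (AbstractKV,),
            {
                "__tablename__": tablename,
                "__table_args__": (
                    UniqueConstraint("key", "namespace", name=f"{tablename}_key_ns"),
                ),
            },
        )


    KVModel = make_kv_model(declarative_base(), "example")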
- - Args: - connection_string (str): psycopg2 connection string - async_connection_string (str): asyncpg connection string - table_name (str): table name - schema_name (Optional[str]): schema name - perform_setup (Optional[bool]): perform table setup - debug (Optional[bool]): debug mode - use_jsonb (Optional[bool]): use JSONB data type for storage - """ - - connection_string: str - async_connection_string: str - table_name: str - schema_name: str - perform_setup: bool - debug: bool - use_jsonb: bool - - def __init__( - self, - connection_string: str, - async_connection_string: str, - table_name: str, - schema_name: str = "public", - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> None: - try: - import asyncpg # noqa - import psycopg2 # noqa - import sqlalchemy - import sqlalchemy.ext.asyncio # noqa - except ImportError: - raise ImportError( - "`sqlalchemy[asyncio]`, `psycopg2-binary` and `asyncpg` " - "packages should be pre installed" - ) - - table_name = table_name.lower() - schema_name = schema_name.lower() - self.connection_string = connection_string - self.async_connection_string = async_connection_string - self.table_name = table_name - self.schema_name = schema_name - self.perform_setup = perform_setup - self.debug = debug - self.use_jsonb = use_jsonb - self._is_initialized = False - - from sqlalchemy.orm import declarative_base - - # sqlalchemy model - self._base = declarative_base() - self._table_class = get_data_model( - self._base, - table_name, - schema_name, - use_jsonb=use_jsonb, - ) - - @classmethod - def from_params( - cls, - host: Optional[str] = None, - port: Optional[str] = None, - database: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - table_name: str = "kvstore", - schema_name: str = "public", - connection_string: Optional[str] = None, - async_connection_string: Optional[str] = None, - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> "PostgresKVStore": - """Return connection string from database parameters.""" - conn_str = ( - connection_string - or f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}" - ) - async_conn_str = async_connection_string or ( - f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{database}" - ) - return cls( - connection_string=conn_str, - async_connection_string=async_conn_str, - table_name=table_name, - schema_name=schema_name, - perform_setup=perform_setup, - debug=debug, - use_jsonb=use_jsonb, - ) - - @classmethod - def from_uri( - cls, - uri: str, - table_name: str = "kvstore", - schema_name: str = "public", - perform_setup: bool = True, - debug: bool = False, - use_jsonb: bool = False, - ) -> "PostgresKVStore": - """Return connection string from database parameters.""" - params = params_from_uri(uri) - return cls.from_params( - **params, - table_name=table_name, - schema_name=schema_name, - perform_setup=perform_setup, - debug=debug, - use_jsonb=use_jsonb, - ) - - def _connect(self) -> Any: - from sqlalchemy import create_engine - from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine - from sqlalchemy.orm import sessionmaker - - self._engine = create_engine(self.connection_string, echo=self.debug) - self._session = sessionmaker(self._engine) - - self._async_engine = create_async_engine(self.async_connection_string) - self._async_session = sessionmaker(self._async_engine, class_=AsyncSession) - - def _create_schema_if_not_exists(self) -> None: - with self._session() as session, 
session.begin(): - from sqlalchemy import text - - # Check if the specified schema exists with "CREATE" statement - check_schema_statement = text( - f"SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{self.schema_name}'" - ) - result = session.execute(check_schema_statement).fetchone() - - # If the schema does not exist, then create it - if not result: - create_schema_statement = text( - f"CREATE SCHEMA IF NOT EXISTS {self.schema_name}" - ) - session.execute(create_schema_statement) - - session.commit() - - def _create_tables_if_not_exists(self) -> None: - with self._session() as session, session.begin(): - self._base.metadata.create_all(session.connection()) - - def _initialize(self) -> None: - if not self._is_initialized: - self._connect() - if self.perform_setup: - self._create_schema_if_not_exists() - self._create_tables_if_not_exists() - self._is_initialized = True - - def put( - self, - key: str, - val: dict, - collection: str = DEFAULT_COLLECTION, - ) -> None: - """Put a key-value pair into the store. - - Args: - key (str): key - val (dict): value - collection (str): collection name - - """ - self.put_all([(key, val)], collection=collection) - - async def aput( - self, - key: str, - val: dict, - collection: str = DEFAULT_COLLECTION, - ) -> None: - """Put a key-value pair into the store. - - Args: - key (str): key - val (dict): value - collection (str): collection name - - """ - await self.aput_all([(key, val)], collection=collection) - - def put_all( - self, - kv_pairs: List[Tuple[str, dict]], - collection: str = DEFAULT_COLLECTION, - batch_size: int = DEFAULT_BATCH_SIZE, - ) -> None: - from sqlalchemy import text - - self._initialize() - with self._session() as session: - for i in range(0, len(kv_pairs), batch_size): - batch = kv_pairs[i : i + batch_size] - - # Prepare the VALUES part of the SQL statement - values_clause = ", ".join( - f"(:key_{i}, :namespace_{i}, :value_{i})" - for i, _ in enumerate(batch) - ) - - # Prepare the raw SQL for bulk upsert - # Note: This SQL is PostgreSQL-specific. Adjust for other databases. - stmt = text( - f""" - INSERT INTO {self.schema_name}.{self._table_class.__tablename__} (key, namespace, value) - VALUES {values_clause} - ON CONFLICT (key, namespace) - DO UPDATE SET - value = EXCLUDED.value; - """ - ) - - # Flatten the list of tuples for execute parameters - params = {} - for i, (key, value) in enumerate(batch): - params[f"key_{i}"] = key - params[f"namespace_{i}"] = collection - params[f"value_{i}"] = json.dumps(value) - - # Execute the bulk upsert - session.execute(stmt, params) - session.commit() - - async def aput_all( - self, - kv_pairs: List[Tuple[str, dict]], - collection: str = DEFAULT_COLLECTION, - batch_size: int = DEFAULT_BATCH_SIZE, - ) -> None: - from sqlalchemy import text - - self._initialize() - async with self._async_session() as session: - for i in range(0, len(kv_pairs), batch_size): - batch = kv_pairs[i : i + batch_size] - - # Prepare the VALUES part of the SQL statement - values_clause = ", ".join( - f"(:key_{i}, :namespace_{i}, :value_{i})" - for i, _ in enumerate(batch) - ) - - # Prepare the raw SQL for bulk upsert - # Note: This SQL is PostgreSQL-specific. Adjust for other databases. 
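The synchronous put_all above, and the async twin whose body continues below, amount to a compact bulk-upsert recipe: one named bind per column per row, with PostgreSQL's ON CONFLICT resolving duplicates. Condensed into a standalone function; the engine URL and table name are placeholders:

    import json
    from typing import List, Tuple

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://user:pass@localhost:5432/db")


    def put_all(kv_pairs: List[Tuple[str, dict]], namespace: str, batch_size: int = 100) -> None:
        """Batched upsert keyed on (key, namespace), as in the deleted store."""
        with engine.begin() as conn:
            for start in range(0, len(kv_pairs), batch_size):
                batch = kv_pairs[start : start + batch_size]
                # One named bind per column per row: (:key_0, :namespace_0, :value_0), ...
                values_clause = ", ".join(
                    f"(:key_{i}, :namespace_{i}, :value_{i})" for i, _ in enumerate(batch)
                )
                stmt = text(
                    f"INSERT INTO public.data_example (key, namespace, value) "
                    f"VALUES {values_clause} "
                    f"ON CONFLICT (key, namespace) DO UPDATE SET value = EXCLUDED.value"
                )
                params = {}
                for i, (key, value) in enumerate(batch):
                    params[f"key_{i}"] = key
                    params[f"namespace_{i}"] = namespace
                    params[f"value_{i}"] = json.dumps(value)
                conn.execute(stmt, params)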
- stmt = text( - f""" - INSERT INTO {self.schema_name}.{self._table_class.__tablename__} (key, namespace, value) - VALUES {values_clause} - ON CONFLICT (key, namespace) - DO UPDATE SET - value = EXCLUDED.value; - """ - ) - - # Flatten the list of tuples for execute parameters - params = {} - for i, (key, value) in enumerate(batch): - params[f"key_{i}"] = key - params[f"namespace_{i}"] = collection - params[f"value_{i}"] = json.dumps(value) - - # Execute the bulk upsert - await session.execute(stmt, params) - await session.commit() - - def get(self, key: str, collection: str = DEFAULT_COLLECTION) -> Optional[dict]: - """Get a value from the store. - - Args: - key (str): key - collection (str): collection name - - """ - from sqlalchemy import select - - self._initialize() - with self._session() as session: - result = session.execute( - select(self._table_class) - .filter_by(key=key) - .filter_by(namespace=collection) - ) - result = result.scalars().first() - if result: - return result.value - return None - - async def aget( - self, key: str, collection: str = DEFAULT_COLLECTION - ) -> Optional[dict]: - """Get a value from the store. - - Args: - key (str): key - collection (str): collection name - - """ - from sqlalchemy import select - - self._initialize() - async with self._async_session() as session: - result = await session.execute( - select(self._table_class) - .filter_by(key=key) - .filter_by(namespace=collection) - ) - result = result.scalars().first() - if result: - return result.value - return None - - def get_all(self, collection: str = DEFAULT_COLLECTION) -> Dict[str, dict]: - """Get all values from the store. - - Args: - collection (str): collection name - - """ - from sqlalchemy import select - - self._initialize() - with self._session() as session: - results = session.execute( - select(self._table_class).filter_by(namespace=collection) - ) - results = results.scalars().all() - return {result.key: result.value for result in results} if results else {} - - async def aget_all(self, collection: str = DEFAULT_COLLECTION) -> Dict[str, dict]: - """Get all values from the store. - - Args: - collection (str): collection name - - """ - from sqlalchemy import select - - self._initialize() - async with self._async_session() as session: - results = await session.execute( - select(self._table_class).filter_by(namespace=collection) - ) - results = results.scalars().all() - return {result.key: result.value for result in results} if results else {} - - def delete(self, key: str, collection: str = DEFAULT_COLLECTION) -> bool: - """Delete a value from the store. - - Args: - key (str): key - collection (str): collection name - - """ - from sqlalchemy import delete - - self._initialize() - with self._session() as session: - result = session.execute( - delete(self._table_class) - .filter_by(namespace=collection) - .filter_by(key=key) - ) - session.commit() - return result.rowcount > 0 - - async def adelete(self, key: str, collection: str = DEFAULT_COLLECTION) -> bool: - """Delete a value from the store. 
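While the adelete docstring continues below, the params_from_uri helper that closes this deleted module is worth pulling out on its own, since it is a plain urlparse recipe:

    from urllib.parse import urlparse


    def params_from_uri(uri: str) -> dict:
        """Split postgresql://user:pass@host:port/dbname into connection params."""
        result = urlparse(uri)
        return {
            "database": result.path[1:],  # drop the leading "/"
            "user": result.username,
            "password": result.password,
            "host": result.hostname,
            "port": result.port if result.port else 5432,  # Postgres default
        }


    assert params_from_uri("postgresql://u:p@db.local/main")["port"] == 5432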
- - Args: - key (str): key - collection (str): collection name - - """ - from sqlalchemy import delete - - self._initialize() - async with self._async_session() as session: - async with session.begin(): - result = await session.execute( - delete(self._table_class) - .filter_by(namespace=collection) - .filter_by(key=key) - ) - return result.rowcount > 0 - - -def params_from_uri(uri: str) -> dict: - result = urlparse(uri) - database = result.path[1:] - port = result.port if result.port else 5432 - return { - "database": database, - "user": result.username, - "password": result.password, - "host": result.hostname, - "port": port, - } diff --git a/llama-index-core/llama_index/core/tools/calling.py b/llama-index-core/llama_index/core/tools/calling.py index 0f53a03ca38b6..6730d5b3f695c 100644 --- a/llama-index-core/llama_index/core/tools/calling.py +++ b/llama-index-core/llama_index/core/tools/calling.py @@ -1,5 +1,5 @@ from llama_index.core.tools.types import BaseTool, ToolOutput, adapt_to_async_tool -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING, Sequence from llama_index.core.llms.llm import ToolSelection import json @@ -60,7 +60,7 @@ async def acall_tool(tool: BaseTool, arguments: dict) -> ToolOutput: def call_tool_with_selection( tool_call: ToolSelection, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], verbose: bool = False, ) -> ToolOutput: from llama_index.core.tools.calling import call_tool @@ -83,7 +83,7 @@ def call_tool_with_selection( async def acall_tool_with_selection( tool_call: ToolSelection, - tools: List["BaseTool"], + tools: Sequence["BaseTool"], verbose: bool = False, ) -> ToolOutput: from llama_index.core.tools.calling import acall_tool diff --git a/llama-index-core/llama_index/core/tools/eval_query_engine.py b/llama-index-core/llama_index/core/tools/eval_query_engine.py index 506af37dd5f66..447dfc3af1ddf 100644 --- a/llama-index-core/llama_index/core/tools/eval_query_engine.py +++ b/llama-index-core/llama_index/core/tools/eval_query_engine.py @@ -37,9 +37,9 @@ class EvalQueryEngineTool(QueryEngineTool): def __init__( self, evaluator: BaseEvaluator, - *args, + *args: Any, failed_tool_output_template: str = FAILED_TOOL_OUTPUT_TEMPLATE, - **kwargs + **kwargs: Any, ): super().__init__(*args, **kwargs) self._evaluator = evaluator @@ -63,10 +63,11 @@ def _process_tool_output( def from_defaults( cls, query_engine: BaseQueryEngine, - evaluator: Optional[BaseEvaluator] = None, name: Optional[str] = None, description: Optional[str] = None, + return_direct: bool = False, resolve_input_errors: bool = True, + evaluator: Optional[BaseEvaluator] = None, ) -> "EvalQueryEngineTool": return cls( evaluator=evaluator or AnswerRelevancyEvaluator(), @@ -74,6 +75,7 @@ def from_defaults( metadata=ToolMetadata( name=name or DEFAULT_NAME, description=description or DEFAULT_DESCRIPTION, + return_direct=return_direct, ), resolve_input_errors=resolve_input_errors, ) diff --git a/llama-index-core/llama_index/core/tools/function_tool.py b/llama-index-core/llama_index/core/tools/function_tool.py index 8e1e32ef5f0a0..23cf7a5f8a48d 100644 --- a/llama-index-core/llama_index/core/tools/function_tool.py +++ b/llama-index-core/llama_index/core/tools/function_tool.py @@ -27,7 +27,7 @@ def async_to_sync(func_async: AsyncCallable) -> Callable: """Async from sync.""" def _sync_wrapped_fn(*args: Any, **kwargs: Any) -> Any: - return asyncio_run(func_async(*args, **kwargs)) + return asyncio_run(func_async(*args, **kwargs)) # type: ignore[arg-type] return _sync_wrapped_fn @@ -46,14 
+46,16 @@ def __init__( async_fn: Optional[AsyncCallable] = None, ) -> None: if fn is None and async_fn is None: - raise ValueError("Either fn or async_fn must be provided.") + raise ValueError("fn or async_fn must be provided.") + if fn is not None: self._fn = fn - else: + elif async_fn is not None: self._fn = async_to_sync(async_fn) + if async_fn is not None: self._async_fn = async_fn - else: + elif fn is not None: self._async_fn = sync_to_async(self._fn) if metadata is None: @@ -73,12 +75,14 @@ def from_defaults( tool_metadata: Optional[ToolMetadata] = None, ) -> "FunctionTool": if tool_metadata is None: - name = name or fn.__name__ - docstring = fn.__doc__ - description = description or f"{name}{signature(fn)}\n{docstring}" + fn_to_parse = fn or async_fn + assert fn_to_parse is not None, "fn or async_fn must be provided." + name = name or fn_to_parse.__name__ + docstring = fn_to_parse.__doc__ + description = description or f"{name}{signature(fn_to_parse)}\n{docstring}" if fn_schema is None: fn_schema = create_schema_from_function( - f"{name}", fn or async_fn, additional_fields=None + f"{name}", fn_to_parse, additional_fields=None ) tool_metadata = ToolMetadata( name=name, diff --git a/llama-index-core/llama_index/core/tools/query_engine.py b/llama-index-core/llama_index/core/tools/query_engine.py index 13b6933598529..dff7d99e37ea2 100644 --- a/llama-index-core/llama_index/core/tools/query_engine.py +++ b/llama-index-core/llama_index/core/tools/query_engine.py @@ -96,7 +96,7 @@ def as_langchain_tool(self) -> "LlamaIndexTool": ) return LlamaIndexTool.from_tool_config(tool_config=tool_config) - def _get_query_str(self, *args, **kwargs) -> str: + def _get_query_str(self, *args: Any, **kwargs: Any) -> str: if args is not None and len(args) > 0: query_str = str(args[0]) elif kwargs is not None and "input" in kwargs: diff --git a/llama-index-core/llama_index/core/tools/query_plan.py b/llama-index-core/llama_index/core/tools/query_plan.py index 7cd1fffc26160..1b0b1e68ee4a8 100644 --- a/llama-index-core/llama_index/core/tools/query_plan.py +++ b/llama-index-core/llama_index/core/tools/query_plan.py @@ -98,7 +98,7 @@ def __init__( self._response_synthesizer = response_synthesizer self._name = name self._description_prefix = description_prefix - self._custom_metadata = None + self._custom_metadata: Optional[ToolMetadata] = None @classmethod def from_defaults( @@ -141,7 +141,7 @@ def metadata(self) -> ToolMetadata: return ToolMetadata(description, self._name, fn_schema=QueryPlan) @metadata.setter - def metadata(self, value: ToolMetadata): + def metadata(self, value: ToolMetadata) -> None: self._custom_metadata = value def _execute_node( diff --git a/llama-index-core/llama_index/core/tools/types.py b/llama-index-core/llama_index/core/tools/types.py index e965777560ea1..2355669858cf1 100644 --- a/llama-index-core/llama_index/core/tools/types.py +++ b/llama-index-core/llama_index/core/tools/types.py @@ -38,7 +38,7 @@ def get_parameters_dict(self) -> dict: parameters = { k: v for k, v in parameters.items() - if k in ["type", "properties", "required", "definitions"] + if k in ["type", "properties", "required", "definitions", "$defs"] } return parameters diff --git a/llama-index-core/llama_index/core/tools/utils.py b/llama-index-core/llama_index/core/tools/utils.py index 1c5a7def5043b..09c2c73f2dd0e 100644 --- a/llama-index-core/llama_index/core/tools/utils.py +++ b/llama-index-core/llama_index/core/tools/utils.py @@ -1,12 +1,12 @@ from inspect import signature -from typing import Any, Callable, List, 
Optional, Tuple, Type, Union, cast +from typing import Any, Awaitable, Callable, List, Optional, Tuple, Type, Union, cast from llama_index.core.bridge.pydantic import BaseModel, FieldInfo, create_model def create_schema_from_function( name: str, - func: Callable[..., Any], + func: Union[Callable[..., Any], Callable[..., Awaitable[Any]]], additional_fields: Optional[ List[Union[Tuple[str, Type, Any], Tuple[str, Type]]] ] = None, diff --git a/llama-index-core/llama_index/core/types.py b/llama-index-core/llama_index/core/types.py index 226444e2b892e..63cd2365197da 100644 --- a/llama-index-core/llama_index/core/types.py +++ b/llama-index-core/llama_index/core/types.py @@ -6,10 +6,13 @@ from typing import ( Any, AsyncGenerator, + Callable, Dict, Generator, Generic, List, + Optional, + Tuple, Type, TypeVar, Union, @@ -82,18 +85,20 @@ def output_cls(self) -> Type[Model]: pass @abstractmethod - def __call__(self, *args: Any, **kwds: Any) -> Model: + def __call__(self, *args: Any, **kwargs: Any) -> Model: pass - async def acall(self, *args: Any, **kwds: Any) -> Model: - return self(*args, **kwds) + async def acall(self, *args: Any, **kwargs: Any) -> Model: + return self(*args, **kwargs) - def stream_call(self, *args: Any, **kwds: Any) -> Generator[Model, None, None]: + def stream_call( + self, *args: Any, **kwargs: Any + ) -> Generator[Union[Model, List[Model]], None, None]: raise NotImplementedError("stream_call is not supported by default.") async def astream_call( - self, *args: Any, **kwds: Any - ) -> AsyncGenerator[Model, None]: + self, *args: Any, **kwargs: Any + ) -> AsyncGenerator[Union[Model, List[Model]], None]: raise NotImplementedError("astream_call is not supported by default.") @@ -114,14 +119,26 @@ class Thread(threading.Thread): """ def __init__( - self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None + self, + group: Optional[Any] = None, + target: Optional[Callable[..., Any]] = None, + name: Optional[str] = None, + args: Tuple[Any, ...] 
= (), + kwargs: Optional[Dict[str, Any]] = None, + *, + daemon: Optional[bool] = None ) -> None: + if target is not None: + args = ( + partial(target, *args, **(kwargs if isinstance(kwargs, dict) else {})), + ) + else: + args = () + super().__init__( group=group, target=copy_context().run, name=name, - args=( - partial(target, *args, **(kwargs if isinstance(kwargs, dict) else {})), - ), + args=args, daemon=daemon, ) diff --git a/llama-index-core/llama_index/core/vector_stores/simple.py b/llama-index-core/llama_index/core/vector_stores/simple.py index 81643b8ca90d3..1343f265a2274 100644 --- a/llama-index-core/llama_index/core/vector_stores/simple.py +++ b/llama-index-core/llama_index/core/vector_stores/simple.py @@ -4,7 +4,7 @@ import logging import os from dataclasses import dataclass, field -from typing import Any, Callable, Dict, List, Mapping, Optional, cast +from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, cast import fsspec from dataclasses_json import DataClassJsonMixin @@ -49,7 +49,7 @@ def _build_metadata_filter_fn( ) -> Callable[[str], bool]: """Build metadata filter function.""" filter_list = metadata_filters.filters if metadata_filters else [] - if not filter_list: + if not filter_list or not metadata_filters: return lambda _: True filter_condition = cast(MetadataFilters, metadata_filters.condition) @@ -90,6 +90,9 @@ def _process_filter_match( filter_matches_list = [] for filter_ in filter_list: + if isinstance(filter_, MetadataFilters): + raise ValueError("Nested MetadataFilters are not supported.") + filter_matches = True metadata_value = metadata.get(filter_.key, None) if filter_.operator == FilterOperator.IS_EMPTY: @@ -156,21 +159,18 @@ def __init__( **kwargs: Any, ) -> None: """Initialize params.""" - super().__init__(data=data or SimpleVectorStoreData()) + super().__init__(data=data or SimpleVectorStoreData()) # type: ignore[call-arg] self._fs = fs or fsspec.filesystem("file") @classmethod def from_persist_dir( cls, persist_dir: str = DEFAULT_PERSIST_DIR, - namespace: Optional[str] = None, + namespace: str = DEFAULT_VECTOR_STORE, fs: Optional[fsspec.AbstractFileSystem] = None, ) -> "SimpleVectorStore": """Load from persist dir.""" - if namespace: - persist_fname = f"{namespace}{NAMESPACE_SEP}{DEFAULT_PERSIST_FNAME}" - else: - persist_fname = DEFAULT_PERSIST_FNAME + persist_fname = f"{namespace}{NAMESPACE_SEP}{DEFAULT_PERSIST_FNAME}" if fs is not None: persist_path = concat_dirs(persist_dir, persist_fname) @@ -246,7 +246,7 @@ def get_nodes( def add( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], **add_kwargs: Any, ) -> List[str]: """Add nodes to index.""" @@ -416,9 +416,9 @@ def from_persist_path( return cls(data) @classmethod - def from_dict(cls, save_dict: dict) -> "SimpleVectorStore": - data = SimpleVectorStoreData.from_dict(save_dict) - return cls(data) + def from_dict(cls, data: Dict[str, Any], **kwargs: Any) -> "SimpleVectorStore": + save_data = SimpleVectorStoreData.from_dict(data) + return cls(save_data) - def to_dict(self) -> dict: + def to_dict(self, **kwargs: Any) -> Dict[str, Any]: return self.data.to_dict() diff --git a/llama-index-core/llama_index/core/vector_stores/types.py b/llama-index-core/llama_index/core/vector_stores/types.py index f6e67f2a8c301..4d582e6832c96 100644 --- a/llama-index-core/llama_index/core/vector_stores/types.py +++ b/llama-index-core/llama_index/core/vector_stores/types.py @@ -181,7 +181,10 @@ def legacy_filters(self) -> List[ExactMatchFilter]: """Convert MetadataFilters to legacy 
ExactMatchFilters.""" filters = [] for filter in self.filters: - if filter.operator != FilterOperator.EQ: + if ( + isinstance(filter, MetadataFilters) + or filter.operator != FilterOperator.EQ + ): raise ValueError( "Vector Store only supports exact match filters. " "Please use ExactMatchFilter or FilterOperator.EQ instead." @@ -349,14 +352,14 @@ async def aget_nodes( @abstractmethod def add( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], **kwargs: Any, ) -> List[str]: """Add nodes to vector store.""" async def async_add( self, - nodes: List[BaseNode], + nodes: Sequence[BaseNode], **kwargs: Any, ) -> List[str]: """ diff --git a/llama-index-core/llama_index/core/workflow/context.py b/llama-index-core/llama_index/core/workflow/context.py index 963717b060734..72c365c64fd72 100644 --- a/llama-index-core/llama_index/core/workflow/context.py +++ b/llama-index-core/llama_index/core/workflow/context.py @@ -1,11 +1,14 @@ -from collections import defaultdict import asyncio -from typing import Dict, Any, Optional, List, Type, TYPE_CHECKING +import warnings +from collections import defaultdict +from typing import Dict, Any, Optional, List, Type, TYPE_CHECKING, Set, Tuple +from .decorators import StepConfig from .events import Event +from .errors import WorkflowRuntimeError -if TYPE_CHECKING: - from .session import WorkflowSession +if TYPE_CHECKING: # pragma: no cover + from .workflow import Workflow class Context: @@ -19,27 +22,21 @@ class Context: Both `set` and `get` operations on global data are governed by a lock, and considered coroutine-safe. """ - def __init__( - self, - session: Optional["WorkflowSession"] = None, - parent: Optional["Context"] = None, - ) -> None: + def __init__(self, workflow: "Workflow") -> None: + self._workflow = workflow + # Broker machinery + self._queues: Dict[str, asyncio.Queue] = {} + self._tasks: Set[asyncio.Task] = set() + self._broker_log: List[Event] = [] + self._step_flags: Dict[str, asyncio.Event] = {} + self._accepted_events: List[Tuple[str, str]] = [] + self._retval: Any = None + # Streaming machinery + self._streaming_queue: asyncio.Queue = asyncio.Queue() # Global data storage - if parent is not None: - self._globals = parent._globals - else: - self._globals: Dict[str, Any] = {} - self._lock = asyncio.Lock() - if session is None: - msg = "A workflow session is needed to create a root context" - raise ValueError(msg) - self._session = session - - # Local data storage - self._locals: Dict[str, Any] = {} - + self._lock = asyncio.Lock() + self._globals: Dict[str, Any] = {} # Step-specific instance - self._parent: Optional[Context] = parent self._events_buffer: Dict[Type[Event], List[Event]] = defaultdict(list) async def set(self, key: str, value: Any, make_private: bool = False) -> None: @@ -48,17 +45,14 @@ async def set(self, key: str, value: Any, make_private: bool = False) -> None: Args: key: A unique string to identify the value stored. value: The data to be stored. - make_private: Make the value only accessible from the step that stored it. Raises: ValueError: When make_private is True but a key already exists in the global storage. """ if make_private: - if key in self._globals: - msg = f"A key named '{key}' already exists in the Context storage." 
- raise ValueError(msg) - self._locals[key] = value - return + warnings.warn( + "`make_private` is deprecated and will be ignored", DeprecationWarning + ) async with self.lock: self._globals[key] = value @@ -73,34 +67,36 @@ async def get(self, key: str, default: Optional[Any] = None) -> Any: Raises: ValueError: When there's no value accessible corresponding to `key`. """ - if key in self._locals: - return self._locals[key] - elif key in self._globals: - async with self.lock: + async with self.lock: + if key in self._globals: return self._globals[key] - elif default is not None: - return default + elif default is not None: + return default msg = f"Key '{key}' not found in Context" raise ValueError(msg) @property - def data(self): + def data(self) -> Dict[str, Any]: """This property is provided for backward compatibility. Use `get` and `set` instead. """ + msg = "`data` is deprecated, please use the `get` and `set` methods to store data in the Context." + warnings.warn(msg, DeprecationWarning) return self._globals @property def lock(self) -> asyncio.Lock: """Returns a mutex to lock the Context.""" - return self._parent._lock if self._parent else self._lock + return self._lock @property - def session(self) -> "WorkflowSession": - """Returns a mutex to lock the Context.""" - return self._parent._session if self._parent else self._session + def session(self) -> "Context": + """This property is provided for backward compatibility.""" + msg = "`session` is deprecated, please use the Context instance directly." + warnings.warn(msg, DeprecationWarning) + return self def collect_events( self, ev: Event, expected: List[Type[Event]] @@ -121,3 +117,41 @@ def collect_events( self._events_buffer[type(ev)].append(ev) return None + + def send_event(self, message: Event, step: Optional[str] = None) -> None: + """Sends an event to a specific step in the workflow. + + If step is None, the event is sent to all the receivers and we let + them discard events they don't want.
+ """ + if step is None: + for queue in self._queues.values(): + queue.put_nowait(message) + else: + if step not in self._workflow._get_steps(): + raise WorkflowRuntimeError(f"Step {step} does not exist") + + step_func = self._workflow._get_steps()[step] + step_config: Optional[StepConfig] = getattr( + step_func, "__step_config", None + ) + + if step_config and type(message) in step_config.accepted_events: + self._queues[step].put_nowait(message) + else: + raise WorkflowRuntimeError( + f"Step {step} does not accept event of type {type(message)}" + ) + + self._broker_log.append(message) + + def write_event_to_stream(self, ev: Optional[Event]) -> None: + self._streaming_queue.put_nowait(ev) + + def get_result(self) -> Any: + """Returns the result of the workflow.""" + return self._retval + + @property + def streaming_queue(self) -> asyncio.Queue: + return self._streaming_queue diff --git a/llama-index-core/llama_index/core/workflow/decorators.py b/llama-index-core/llama_index/core/workflow/decorators.py index 80eee44098ce6..486d10c56d906 100644 --- a/llama-index-core/llama_index/core/workflow/decorators.py +++ b/llama-index-core/llama_index/core/workflow/decorators.py @@ -1,6 +1,6 @@ from typing import TYPE_CHECKING, Any, Callable, List, Optional, Type -from llama_index.core.bridge.pydantic import BaseModel +from llama_index.core.bridge.pydantic import BaseModel, ConfigDict from .errors import WorkflowValidationError from .utils import ( @@ -10,25 +10,30 @@ ServiceDefinition, ) -if TYPE_CHECKING: +if TYPE_CHECKING: # pragma: no cover from .workflow import Workflow +from .retry_policy import RetryPolicy class StepConfig(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + accepted_events: List[Any] event_name: str return_types: List[Any] context_parameter: Optional[str] num_workers: int requested_services: List[ServiceDefinition] + retry_policy: Optional[RetryPolicy] def step( - *args, + *args: Any, workflow: Optional[Type["Workflow"]] = None, pass_context: bool = False, num_workers: int = 1, -): + retry_policy: Optional[RetryPolicy] = None, +) -> Callable: """Decorator used to mark methods and functions as workflow steps. Decorators are evaluated at import time, but we need to wait for @@ -38,13 +43,6 @@ def step( """ def decorator(func: Callable) -> Callable: - # If this is a free function, call add_step() explicitly. - if is_free_function(func.__qualname__): - if workflow is None: - msg = f"To decorate {func.__name__} please pass a workflow class to the @step decorator." - raise WorkflowValidationError(msg) - workflow.add_step(func) - if not isinstance(num_workers, int) or num_workers <= 0: raise WorkflowValidationError( "num_workers must be an integer greater than 0" @@ -56,15 +54,23 @@ def decorator(func: Callable) -> Callable: event_name, accepted_events = next(iter(spec.accepted_events.items())) # store the configuration in the function object - func.__step_config = StepConfig( + func.__step_config = StepConfig( # type: ignore[attr-defined] accepted_events=accepted_events, event_name=event_name, return_types=spec.return_types, context_parameter=spec.context_parameter, num_workers=num_workers, requested_services=spec.requested_services or [], + retry_policy=retry_policy, ) + # If this is a free function, call add_step() explicitly. + if is_free_function(func.__qualname__): + if workflow is None: + msg = f"To decorate {func.__name__} please pass a workflow class to the @step decorator." 
+ raise WorkflowValidationError(msg) + workflow.add_step(func) + return func if len(args): diff --git a/llama-index-core/llama_index/core/workflow/drawing.py b/llama-index-core/llama_index/core/workflow/drawing.py index fcf376dc74aa1..9ccc370683857 100644 --- a/llama-index-core/llama_index/core/workflow/drawing.py +++ b/llama-index-core/llama_index/core/workflow/drawing.py @@ -1,6 +1,7 @@ from deprecated import deprecated from typing import Optional +from .workflow import Workflow from .events import StartEvent, StopEvent from .decorators import StepConfig from .utils import get_steps_from_class, get_steps_from_instance @@ -10,7 +11,7 @@ reason="Install `llama-index-utils-workflow` and use the import `from llama_index.utils.workflow` instead." ) def draw_all_possible_flows( - workflow, + workflow: Workflow, filename: str = "workflow_all_flows.html", notebook: bool = False, ) -> None: @@ -35,8 +36,9 @@ def draw_all_possible_flows( # If no steps are defined in the class, try to get them from the instance steps = get_steps_from_instance(workflow) - for step_name, step_func in get_steps_from_class(workflow).items(): - step_config: Optional[StepConfig] = getattr(step_func, "__step_config", None) + step_config: Optional[StepConfig] = None + for step_name, step_func in steps.items(): + step_config = getattr(step_func, "__step_config", None) if step_config is None: continue @@ -53,8 +55,8 @@ def draw_all_possible_flows( ) # Light green for events # Add edges from all steps - for step_name, step_func in get_steps_from_class(workflow).items(): - step_config: Optional[StepConfig] = getattr(step_func, "__step_config", None) + for step_name, step_func in steps.items(): + step_config = getattr(step_func, "__step_config", None) if step_config is None: continue @@ -73,7 +75,7 @@ def draw_all_possible_flows( reason="Install `llama-index-utils-workflow` and use the import `from llama_index.utils.workflow` instead." 
) def draw_most_recent_execution( - workflow, + workflow: Workflow, filename: str = "workflow_recent_execution.html", notebook: bool = False, ) -> None: @@ -83,7 +85,11 @@ def draw_most_recent_execution( net = Network(directed=True, height="750px", width="100%") # Add nodes and edges based on execution history - for i, (step, event) in enumerate(workflow._accepted_events): + existing_context = next(iter(workflow._contexts), None) + if existing_context is None: + raise ValueError("No runs found in workflow") + + for i, (step, event) in enumerate(existing_context._accepted_events): event_node = f"{event}_{i}" step_node = f"{step}_{i}" net.add_node( @@ -95,7 +101,7 @@ net.add_edge(event_node, step_node) if i > 0: - prev_step_node = f"{workflow._accepted_events[i - 1][0]}_{i - 1}" + prev_step_node = f"{existing_context._accepted_events[i - 1][0]}_{i - 1}" net.add_edge(prev_step_node, event_node) net.show(filename, notebook=notebook) diff --git a/llama-index-core/llama_index/core/workflow/events.py b/llama-index-core/llama_index/core/workflow/events.py index 974f961360ef7..14981c05c3942 100644 --- a/llama-index-core/llama_index/core/workflow/events.py +++ b/llama-index-core/llama_index/core/workflow/events.py @@ -1,4 +1,5 @@ from typing import Any, Dict, Type +from _collections_abc import dict_keys, dict_items, dict_values from llama_index.core.bridge.pydantic import BaseModel, Field, PrivateAttr, ConfigDict @@ -71,7 +72,7 @@ def __init__(self, **params: Any): def __getattr__(self, __name: str) -> Any: if __name in self.__private_attributes__ or __name in self.model_fields: - return super().__getattr__(__name) + return super().__getattr__(__name) # type: ignore else: try: return self._data[__name] @@ -80,7 +81,7 @@ f"'{self.__class__.__name__}' object has no attribute '{__name}'" ) - def __setattr__(self, name, value) -> None: + def __setattr__(self, name: str, value: Any) -> None: if name in self.__private_attributes__ or name in self.model_fields: super().__setattr__(name, value) else: @@ -98,13 +99,13 @@ def get(self, key: str, default: Any = None) -> Any: def __contains__(self, key: str) -> bool: return key in self._data - def keys(self) -> Dict[str, Any].keys: + def keys(self) -> "dict_keys[str, Any]": return self._data.keys() - def values(self) -> Dict[str, Any].values: + def values(self) -> "dict_values[str, Any]": return self._data.values() - def items(self) -> Dict[str, Any].items: + def items(self) -> "dict_items[str, Any]": return self._data.items() def __len__(self) -> int: diff --git a/llama-index-core/llama_index/core/workflow/retry_policy.py b/llama-index-core/llama_index/core/workflow/retry_policy.py new file mode 100644 index 0000000000000..9530a52663139 --- /dev/null +++ b/llama-index-core/llama_index/core/workflow/retry_policy.py @@ -0,0 +1,32 @@ +from typing import Protocol, Optional, runtime_checkable + + +@runtime_checkable +class RetryPolicy(Protocol): + def next( + self, elapsed_time: float, attempts: int, error: Exception + ) -> Optional[float]: + """Decide if we should make another retry, returning the number of seconds to wait before the next run. + + Args: + elapsed_time: Time in seconds that passed since the last attempt. + attempts: The number of attempts done so far. + error: The last error that occurred. + + Returns: + The number of seconds to wait before the next attempt, or None if we stop retrying.
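Any object exposing this `next` method satisfies the protocol, so custom policies don't need to inherit from anything. As a usage sketch (editor's illustration, not part of the patch), here is a hypothetical exponential-backoff policy and how it would be attached to a step via the new `retry_policy` argument of `@step`; the stock `ConstantDelayRetryPolicy` that follows implements the same protocol:

from typing import Optional


class ExponentialBackoffRetryPolicy:  # hypothetical custom policy
    def __init__(self, maximum_attempts: int = 5, base_delay: float = 0.5) -> None:
        self.maximum_attempts = maximum_attempts
        self.base_delay = base_delay

    def next(
        self, elapsed_time: float, attempts: int, error: Exception
    ) -> Optional[float]:
        if attempts >= self.maximum_attempts:
            return None  # stop retrying; the workflow re-raises the last error
        return self.base_delay * (2**attempts)  # 0.5s, 1s, 2s, 4s, ...


# Hypothetical usage on a workflow step:
# @step(retry_policy=ExponentialBackoffRetryPolicy())
# async def call_flaky_service(self, ev: StartEvent) -> StopEvent: ...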
+ """ + + +class ConstantDelayRetryPolicy: + def __init__(self, maximum_attempts: int = 3, delay: float = 5) -> None: + self.maximum_attempts = maximum_attempts + self.delay = delay + + def next( + self, elapsed_time: float, attempts: int, error: Exception + ) -> Optional[float]: + if attempts >= self.maximum_attempts: + return None + + return self.delay diff --git a/llama-index-core/llama_index/core/workflow/service.py b/llama-index-core/llama_index/core/workflow/service.py index 10890e6b8c21f..823837a829ed3 100644 --- a/llama-index-core/llama_index/core/workflow/service.py +++ b/llama-index-core/llama_index/core/workflow/service.py @@ -1,7 +1,7 @@ from typing import Dict, TYPE_CHECKING, Optional -if TYPE_CHECKING: +if TYPE_CHECKING: # pragma: no cover from .workflow import Workflow diff --git a/llama-index-core/llama_index/core/workflow/session.py b/llama-index-core/llama_index/core/workflow/session.py deleted file mode 100644 index 402b650417025..0000000000000 --- a/llama-index-core/llama_index/core/workflow/session.py +++ /dev/null @@ -1,77 +0,0 @@ -import asyncio -from typing import Any, TYPE_CHECKING, Dict, Set, List, Tuple, Optional - - -from .context import Context -from .decorators import StepConfig -from .events import Event -from .errors import WorkflowRuntimeError - -if TYPE_CHECKING: - from .workflow import Workflow - - -class WorkflowSession: - def __init__(self, workflow: "Workflow") -> None: - self._workflow = workflow - # Broker machinery - self._queues: Dict[str, asyncio.Queue] = {} - self._tasks: Set[asyncio.Task] = set() - self._broker_log: List[Event] = [] - self._step_flags: Dict[str, asyncio.Event] = {} - self._accepted_events: List[Tuple[str, str]] = [] - self._retval: Any = None - self._root_context = Context(self) - # Context management - self._step_to_context: Dict[str, Context] = {} - # Streaming machinery - self._streaming_queue: asyncio.Queue = asyncio.Queue() - - def send_event(self, message: Event, step: Optional[str] = None) -> None: - """Sends an event to a specific step in the workflow. - - If step is None, the event is sent to all the receivers and we let - them discard events they don't want. - """ - if step is None: - for queue in self._queues.values(): - queue.put_nowait(message) - else: - if step not in self._workflow._get_steps(): - raise WorkflowRuntimeError(f"Step {step} does not exist") - - step_func = self._workflow._get_steps()[step] - step_config: Optional[StepConfig] = getattr( - step_func, "__step_config", None - ) - - if step_config and type(message) in step_config.accepted_events: - self._queues[step].put_nowait(message) - else: - raise WorkflowRuntimeError( - f"Step {step} does not accept event of type {type(message)}" - ) - - self._broker_log.append(message) - - def write_event_to_stream(self, ev: Optional[Event]) -> None: - self._streaming_queue.put_nowait(ev) - - def get_context(self, step_name: str) -> Context: - """Get the global context for this workflow. - - The Workflow instance is ultimately responsible for managing the lifecycle - of the global context object and for passing it to the steps functions that - require it. 
- """ - if step_name not in self._step_to_context: - self._step_to_context[step_name] = Context(parent=self._root_context) - return self._step_to_context[step_name] - - def get_result(self) -> Any: - """Returns the result of the workflow.""" - return self._retval - - @property - def streaming_queue(self) -> asyncio.Queue: - return self._streaming_queue diff --git a/llama-index-core/llama_index/core/workflow/utils.py b/llama-index-core/llama_index/core/workflow/utils.py index 57a9f9a924ffe..5f413a3843dc1 100644 --- a/llama-index-core/llama_index/core/workflow/utils.py +++ b/llama-index-core/llama_index/core/workflow/utils.py @@ -13,13 +13,12 @@ # handle python version compatibility try: - from types import UnionType -except ImportError: - UnionType = Union + from types import UnionType # type: ignore[attr-defined] +except ImportError: # pragma: no cover + from typing import Union as UnionType from llama_index.core.bridge.pydantic import BaseModel, ConfigDict -from .context import Context from .events import Event, EventType from .errors import WorkflowValidationError @@ -57,7 +56,7 @@ def inspect_signature(fn: Callable) -> StepSignatureSpec: continue # Get name and type of the Context param - if t.annotation == Context: + if hasattr(t.annotation, "__name__") and t.annotation.__name__ == "Context": context_parameter = name continue @@ -156,7 +155,7 @@ def _get_return_types(func: Callable) -> List[Any]: return [return_hint] -def is_free_function(qualname: str): +def is_free_function(qualname: str) -> bool: """Determines whether a certain qualified name points to a free function. The strategy should be able to spot nested functions, for details see PEP-3155. diff --git a/llama-index-core/llama_index/core/workflow/workflow.py b/llama-index-core/llama_index/core/workflow/workflow.py index 01cc2d0ddbe34..dcb630702895b 100644 --- a/llama-index-core/llama_index/core/workflow/workflow.py +++ b/llama-index-core/llama_index/core/workflow/workflow.py @@ -1,37 +1,46 @@ import asyncio import functools +import time import warnings -from typing import Any, Callable, Dict, Optional, AsyncGenerator, Set +from typing import Any, Callable, Dict, Optional, AsyncGenerator, Set, Tuple from llama_index.core.instrumentation import get_dispatcher -from llama_index.core.workflow.decorators import StepConfig, step -from llama_index.core.workflow.events import Event, StartEvent, StopEvent -from llama_index.core.workflow.utils import ( + +from .decorators import StepConfig, step +from .context import Context +from .events import Event, StartEvent, StopEvent +from .errors import * +from .service import ServiceManager +from .utils import ( get_steps_from_class, get_steps_from_instance, ServiceDefinition, ) -from .context import Context -from .errors import ( - WorkflowDone, - WorkflowTimeoutError, - WorkflowValidationError, - WorkflowRuntimeError, -) -from .service import ServiceManager -from .session import WorkflowSession dispatcher = get_dispatcher(__name__) -class _WorkflowMeta(type): - def __init__(self, *args, **kwargs) -> None: - super().__init__(*args, **kwargs) - self._step_functions: Dict[str, Callable] = {} +class WorkflowMeta(type): + def __init__(cls, name: str, bases: Tuple[type, ...], dct: Dict[str, Any]) -> None: + super().__init__(name, bases, dct) + cls._step_functions: Dict[str, Callable] = {} + + +class Workflow(metaclass=WorkflowMeta): + """An event-driven abstraction used to orchestrate the execution of different components called "steps". 
+ + Each step is responsible for handling certain event types and possibly emitting new events. Steps can be "bound" + when they are defined as methods of the `Workflow` class itself, or "unbound" when they are defined as free + functions. To define a step, the method or function must be decorated with the `@step` decorator. + + Workflows provide basic validation to catch potential runtime errors as soon as possible. Validation happens once, + when the workflow starts, and adds little overhead. It can nevertheless be disabled. + + Use an instance of a `Workflow` class to run a workflow and stream events produced during execution. Workflows + can be run step-by-step by calling the `run_step` method multiple times until completion. + """ -class Workflow(metaclass=_WorkflowMeta): def __init__( self, timeout: Optional[float] = 10.0, @@ -39,45 +48,78 @@ def __init__( verbose: bool = False, service_manager: Optional[ServiceManager] = None, ) -> None: + """Create an instance of the workflow. + + Args: + timeout: number of seconds after which the workflow execution will be halted, raising a `WorkflowTimeoutError` + exception. If set to `None`, the timeout will be disabled. + disable_validation: whether or not the workflow should be validated before running. In case the workflow is + misconfigured, a call to `run` will raise a `WorkflowValidationError` exception explaining the details + of the problem. + verbose: whether or not the workflow should print additional informative messages during execution. + service_manager: The instance of the `ServiceManager` used to make nested workflows available to this + workflow instance. The default value is the best choice unless you're customizing the workflow runtime. + """ # Configuration self._timeout = timeout self._verbose = verbose self._disable_validation = disable_validation # Broker machinery - self._sessions: Set[WorkflowSession] = set() - self._step_session: Optional[WorkflowSession] = None + self._contexts: Set[Context] = set() + self._stepwise_context: Optional[Context] = None # Services management self._service_manager = service_manager or ServiceManager() async def stream_events(self) -> AsyncGenerator[Event, None]: + """Returns an async generator to consume any event that workflow steps decide to stream. + + To be able to use this generator, the usual pattern is to wrap the `run` call in a background task using + `asyncio.create_task`, then enter a for loop like this: + + wf = StreamingWorkflow() + r = asyncio.create_task(wf.run()) + + async for ev in wf.stream_events(): + print(ev) + + await r + """ # In the typical streaming use case, `run()` is not awaited but wrapped in an asyncio.Task. Since we'll be # consuming events produced by `run()`, we must give its Task the chance to run before entering the dequeuing # loop. await asyncio.sleep(0) - if len(self._sessions) > 1: + if len(self._contexts) > 1: # We can't possibly know from which run we should stream events, raise an error. msg = ( - "This workflow has multiple session running concurrently and cannot stream events. " + "This workflow has multiple concurrent runs in progress and cannot stream events. " "To be able to stream events, make sure you call `run()` on this workflow only once." ) raise WorkflowRuntimeError(msg) # Enter the dequeuing loop.
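Putting the docstring's pattern together with `write_event_to_stream`, a runnable end-to-end sketch (editor's illustration, not part of the patch; the `ProgressEvent` class is hypothetical):

import asyncio

from llama_index.core.workflow import Context, Event, StartEvent, StopEvent, Workflow, step


class ProgressEvent(Event):  # hypothetical event type used only for streaming
    msg: str


class StreamingWorkflow(Workflow):
    @step
    async def do_work(self, ctx: Context, ev: StartEvent) -> StopEvent:
        for i in range(3):
            # Each call lands on the run's streaming queue.
            ctx.write_event_to_stream(ProgressEvent(msg=f"step {i}"))
        return StopEvent(result="done")


async def main() -> None:
    wf = StreamingWorkflow()
    task = asyncio.create_task(wf.run())  # run in the background
    async for ev in wf.stream_events():  # ends when a StopEvent is dequeued
        print(ev.msg)
    await task


asyncio.run(main())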
- session = next(iter(self._sessions)) + ctx = next(iter(self._contexts)) while True: - ev = await session.streaming_queue.get() + ev = await ctx.streaming_queue.get() if type(ev) is StopEvent: break yield ev + # remove context to free up room for the next stream_events call + self._contexts.remove(ctx) + @classmethod def add_step(cls, func: Callable) -> None: """Adds a free function as step for this workflow instance. It raises an exception if a step with the same name was already added to the workflow. """ + step_config: Optional[StepConfig] = getattr(func, "__step_config", None) + if not step_config: + msg = f"Step function {func.__name__} is missing the `@step` decorator." + raise WorkflowValidationError(msg) + if func.__name__ in {**get_steps_from_class(cls), **cls._step_functions}: msg = f"A step {func.__name__} is already part of this workflow, please choose another name." raise WorkflowValidationError(msg) @@ -95,24 +137,21 @@ def add_workflows(self, **workflows: "Workflow") -> None: def _get_steps(self) -> Dict[str, Callable]: """Returns all the steps, whether defined as methods or free functions.""" - return {**get_steps_from_instance(self), **self._step_functions} + return {**get_steps_from_instance(self), **self._step_functions} # type: ignore[attr-defined] - def _start(self, stepwise: bool = False) -> WorkflowSession: + def _start(self, stepwise: bool = False) -> Context: """Sets up the queues and tasks for each declared step. This method also launches each step as an async task. """ - session = WorkflowSession(self) - self._sessions.add(session) + ctx = Context(self) + self._contexts.add(ctx) for name, step_func in self._get_steps().items(): - session._queues[name] = asyncio.Queue() - session._step_flags[name] = asyncio.Event() - step_config: Optional[StepConfig] = getattr( - step_func, "__step_config", None - ) - if not step_config: - raise ValueError(f"Step {name} is missing `@step` decorator.") + ctx._queues[name] = asyncio.Queue() + ctx._step_flags[name] = asyncio.Event() + # At this point, step_func is guaranteed to have the `__step_config` attribute + step_config: StepConfig = getattr(step_func, "__step_config") async def _task( name: str, @@ -127,19 +166,19 @@ async def _task( # do we need to wait for the step flag? 
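Since each run now gets its own `Context` created in `_start` and handed straight to the steps that declare a `ctx` parameter, shared state, streaming, and event delivery all hang off that one object. A minimal sketch of a step using this surface (editor's illustration, assuming the public imports from `llama_index.core.workflow`; the workflow name is hypothetical):

import asyncio

from llama_index.core.workflow import Context, Event, StartEvent, StopEvent, Workflow, step


class CounterWorkflow(Workflow):  # hypothetical example workflow
    @step
    async def count(self, ctx: Context, ev: StartEvent) -> StopEvent:
        # Shared state now lives in a single lock-protected global store.
        await ctx.set("counter", 41)
        counter = await ctx.get("counter")
        # Push a progress event to any `stream_events()` consumer.
        ctx.write_event_to_stream(Event(msg="almost done"))
        return StopEvent(result=counter + 1)


print(asyncio.run(CounterWorkflow(timeout=10).run()))  # prints 42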
if stepwise: - await session._step_flags[name].wait() + await ctx._step_flags[name].wait() # clear all flags so that we only run one step - for flag in session._step_flags.values(): + for flag in ctx._step_flags.values(): flag.clear() if self._verbose and name != "_done": print(f"Running step {name}") # run step - kwargs = {} + kwargs: Dict[str, Any] = {} if config.context_parameter: - kwargs[config.context_parameter] = session.get_context(name) + kwargs[config.context_parameter] = ctx for service_definition in config.requested_services: service = self._service_manager.get( service_definition.name, service_definition.default_value @@ -147,12 +186,36 @@ async def _task( kwargs[service_definition.name] = service kwargs[config.event_name] = ev - # - check if its async or not - # - if not async, run it in an executor + # wrap the step with instrumentation instrumented_step = dispatcher.span(step) + # - check if it's async or not + # - if not async, run it in an executor if asyncio.iscoroutinefunction(step): - new_ev = await instrumented_step(**kwargs) + retry_start_at = time.time() + attempts = 0 + while True: + try: + new_ev = await instrumented_step(**kwargs) + break # exit the retrying loop + except Exception as e: + if config.retry_policy is None: + raise e from None + + # pass the time elapsed since the first attempt + delay = config.retry_policy.next( + time.time() - retry_start_at, attempts, e + ) + if delay is None: + # We're done retrying + raise e from None + + attempts += 1 + if self._verbose: + print( + f"Step {name} produced an error, retry in {delay} seconds" + ) + await asyncio.sleep(delay) + else: run_task = functools.partial(instrumented_step, **kwargs) new_ev = await asyncio.get_event_loop().run_in_executor( @@ -170,61 +233,61 @@ async def _task( continue # Store the accepted event for the drawing operations - session._accepted_events.append((name, type(ev).__name__)) + ctx._accepted_events.append((name, type(ev).__name__)) if not isinstance(new_ev, Event): warnings.warn( f"Step function {name} returned {type(new_ev).__name__} instead of an Event instance." ) else: - session.send_event(new_ev) + ctx.send_event(new_ev) for _ in range(step_config.num_workers): - session._tasks.add( + ctx._tasks.add( asyncio.create_task( - _task(name, session._queues[name], step_func, step_config), + _task(name, ctx._queues[name], step_func, step_config), name=name, ) ) - return session + return ctx def send_event(self, message: Event, step: Optional[str] = None) -> None: msg = ( "Use a Context instance to send events from a step. " "Make sure your step method or function takes a parameter of type Context like `ctx: Context` and " - "replace `self.send_event(...)` with `ctx.session.send_event(...)` in your code." + "replace `self.send_event(...)` with `ctx.send_event(...)` in your code." ) - if len(self._sessions) > 1: + if len(self._contexts) > 1: # We can't possibly know to which run we should send this event, raise an error. raise WorkflowRuntimeError(msg) # Emit a warning as this won't work for multiple run()s. warnings.warn(msg) - session = next(iter(self._sessions)) - session.send_event(message=message, step=step) + ctx = next(iter(self._contexts)) + ctx.send_event(message=message, step=step) @dispatcher.span async def run(self, **kwargs: Any) -> str: """Runs the workflow until completion. Works by - 1. validating the workflow - 2. starting the workflow by setting up the queues and tasks - 3. sending a StartEvent to kick things off - 4. waiting for all tasks to finish or be cancelled + 1. validating the workflow + 2.
starting the workflow by setting up the queues and tasks + 3. sending a StartEvent to kick things off + 4. waiting for all tasks to finish or be cancelled """ # Validate the workflow if needed self._validate() - # Start the machinery in a new session - session = self._start() + # Start the machinery in a new Context + ctx = self._start() # Send the first event - session.send_event(StartEvent(**kwargs)) + ctx.send_event(StartEvent(**kwargs)) done, unfinished = await asyncio.wait( - session._tasks, timeout=self._timeout, return_when=asyncio.FIRST_EXCEPTION + ctx._tasks, timeout=self._timeout, return_when=asyncio.FIRST_EXCEPTION ) we_done = False @@ -250,6 +313,8 @@ async def run(self, **kwargs: Any) -> str: # Bubble up the error if any step raised an exception if exception_raised: + # Make sure to stop streaming, in case the workflow terminated abnormally + ctx.write_event_to_stream(StopEvent()) raise exception_raised # Raise WorkflowTimeoutError if the workflow timed out @@ -257,28 +322,28 @@ async def run(self, **kwargs: Any) -> str: msg = f"Operation timed out after {self._timeout} seconds" raise WorkflowTimeoutError(msg) - return session._retval + return ctx._retval @dispatcher.span async def run_step(self, **kwargs: Any) -> Optional[str]: """Runs the workflow stepwise until completion. Works by - 1. Validating and setting up the queues and tasks if the first step hasn't been started - 2. Sending a StartEvent to kick things off - 3. Sets the flag for all steps to run once (if they can run) - 4. Waiting for the next step(s) to finish - 5. Returning the result if the workflow is done + 1. Validating and setting up the queues and tasks if the first step hasn't been started + 2. Sending a StartEvent to kick things off + 3. Sets the flag for all steps to run once (if they can run) + 4. Waiting for the next step(s) to finish + 5. 
Returning the result if the workflow is done """ # Check if we need to start a new stepwise context - if self._step_session is None: + if self._stepwise_context is None: self._validate() - self._step_session = self._start(stepwise=True) + self._stepwise_context = self._start(stepwise=True) # Run the first step - self._step_session.send_event(StartEvent(**kwargs)) + self._stepwise_context.send_event(StartEvent(**kwargs)) # Unblock all pending steps - for flag in self._step_session._step_flags.values(): + for flag in self._stepwise_context._step_flags.values(): flag.set() # Yield back control to the event loop to give an unblocked step @@ -288,30 +353,24 @@ async def run_step(self, **kwargs: Any) -> Optional[str]: # See if we're done, or if a step raised any error we_done = False exception_raised = None - for t in self._step_session._tasks: + for t in self._stepwise_context._tasks: + # Check if we're done if not t.done(): continue + we_done = True e = t.exception() - if e is None: - continue - - # Check if we're done - if type(e) == WorkflowDone: - we_done = True - continue - - # In any other case, bubble up the exception - exception_raised = e + if type(e) != WorkflowDone: + exception_raised = e retval = None if we_done: # Remove any reference to the tasks - for t in self._step_session._tasks: + for t in self._stepwise_context._tasks: t.cancel() await asyncio.sleep(0) - retval = self._step_session._retval - self._step_session = None + retval = self._stepwise_context._retval + self._stepwise_context = None if exception_raised: raise exception_raised @@ -320,13 +379,13 @@ async def run_step(self, **kwargs: Any) -> Optional[str]: def is_done(self) -> bool: """Checks if the workflow is done.""" - return self._step_session is None + return self._stepwise_context is None @step async def _done(self, ctx: Context, ev: StopEvent) -> None: """Tears down the whole workflow and stops execution.""" - ctx.session._retval = ev.result or None - ctx.session.write_event_to_stream(ev) + ctx._retval = ev.result or None + ctx.write_event_to_stream(ev) # Signal we want to stop the workflow raise WorkflowDone @@ -340,12 +399,10 @@ def _validate(self) -> None: consumed_events: Set[type] = set() requested_services: Set[ServiceDefinition] = set() - for name, step_func in self._get_steps().items(): - step_config: Optional[StepConfig] = getattr( - step_func, "__step_config", None - ) - if not step_config: - raise ValueError(f"Step {name} is missing `@step` decorator.") + for step_func in self._get_steps().values(): + step_config: Optional[StepConfig] = getattr(step_func, "__step_config") + # At this point we know step config is not None, let's make the checker happy + assert step_config is not None for event_type in step_config.accepted_events: consumed_events.add(event_type) @@ -362,25 +419,19 @@ def _validate(self) -> None: # Check if all consumed events are produced unconsumed_events = consumed_events - produced_events if unconsumed_events: + names = ", ".join(ev.__name__ for ev in unconsumed_events) raise WorkflowValidationError( - f"The following events are consumed but never produced: {unconsumed_events}" + f"The following events are consumed but never produced: {names}" ) # Check if there are any unused produced events (except StopEvent) unused_events = produced_events - consumed_events - {StopEvent} if unused_events: + names = ", ".join(ev.__name__ for ev in unused_events) raise WorkflowValidationError( - f"The following events are produced but never consumed: {unused_events}" + f"The following events are produced but
never consumed: {names}" ) - # Check if there's at least one step that consumes StartEvent - if StartEvent not in consumed_events: - raise WorkflowValidationError("No step consumes StartEvent") - - # Check if there's at least one step that produces StopEvent - if StopEvent not in produced_events: - raise WorkflowValidationError("No step produces StopEvent") - # Check all the requested services are available required_service_names = { sd.name for sd in requested_services if sd.default_value is None diff --git a/llama-index-core/poetry.lock b/llama-index-core/poetry.lock index bb272c089811c..277ca7c3eaeb5 100644 --- a/llama-index-core/poetry.lock +++ b/llama-index-core/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -335,22 +335,22 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autodoc-pydantic" @@ -572,13 +572,13 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -937,38 +937,38 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "43.0.0" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = 
"sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = 
"cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -981,7 +981,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -1197,13 +1197,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] @@ -1497,13 +1497,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP 
client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1518,6 +1518,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "identify" @@ -1535,13 +1536,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -1857,22 +1858,22 @@ files = [ [[package]] name = "jsii" -version = "1.102.0" +version = "1.103.1" description = "Python client for jsii runtime" optional = false python-versions = "~=3.8" files = [ - {file = "jsii-1.102.0-py3-none-any.whl", hash = "sha256:9e0f54acd55d8ea7a0bfd7e4a3dccacf6ca3466a8d67d47703594cffedad382a"}, - {file = "jsii-1.102.0.tar.gz", hash = "sha256:ee044964a0db600d9dcde85b4763beb996b3f56a4c951911eb3ff073deeb8603"}, + {file = "jsii-1.103.1-py3-none-any.whl", hash = "sha256:24b96349230ca22f50fcd69c501e69b6c486acf37bbe0b5869f4c185572b079e"}, + {file = "jsii-1.103.1.tar.gz", hash = "sha256:7eaa46e8cd9546edc6bba81d0b32df9f8ed8f5848305277d261cccfe00b9c1eb"}, ] [package.dependencies] -attrs = ">=21.2,<24.0" +attrs = ">=21.2,<25.0" cattrs = ">=1.8,<23.3" importlib-resources = ">=5.2.0" publication = ">=0.0.3" python-dateutil = "*" -typeguard = ">=2.13.3,<2.14.0" +typeguard = ">=2.13.3,<5.0.0" typing-extensions = ">=3.8,<5.0" [[package]] @@ -1945,23 +1946,22 @@ referencing = ">=0.31.0" [[package]] name = "jupyter" -version = "1.0.0" +version = "1.1.1" description = "Jupyter metapackage. Install all the Jupyter components in one go." 
optional = false python-versions = "*" files = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, + {file = "jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83"}, + {file = "jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a"}, ] [package.dependencies] ipykernel = "*" ipywidgets = "*" jupyter-console = "*" +jupyterlab = "*" nbconvert = "*" notebook = "*" -qtconsole = "*" [[package]] name = "jupyter-cache" @@ -2154,13 +2154,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.2.4" +version = "4.2.5" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.2.4-py3-none-any.whl", hash = "sha256:807a7ec73637744f879e112060d4b9d9ebe028033b7a429b2d1f4fc523d00245"}, - {file = "jupyterlab-4.2.4.tar.gz", hash = "sha256:343a979fb9582fd08c8511823e320703281cd072a0049bcdafdc7afeda7f2537"}, + {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, + {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, ] [package.dependencies] @@ -2298,13 +2298,13 @@ tornado = "*" [[package]] name = "llama-cloud" -version = "0.0.14" +version = "0.0.15" description = "" optional = false python-versions = "<4,>=3.8" files = [ - {file = "llama_cloud-0.0.14-py3-none-any.whl", hash = "sha256:356143a9d88d59ed8f0474841fcfba053fe8b56ff8bb3771e570d583869061f8"}, - {file = "llama_cloud-0.0.14.tar.gz", hash = "sha256:44157bd3fc84099365181fb0254b7b533a502f2a8b97d5f87e86d1cccc1501d8"}, + {file = "llama_cloud-0.0.15-py3-none-any.whl", hash = "sha256:52f18a3870e23c4a9b5f66827a58dc87d5a1c3034d1ce6ab513ca7eb09ae8b36"}, + {file = "llama_cloud-0.0.15.tar.gz", hash = "sha256:be06fd888e889623796b9c2aa0fc0d09ef039ed5145ff267d8408ccbea70c048"}, ] [package.dependencies] @@ -2783,52 +2783,49 @@ files = [ [[package]] name = "mypy" -version = "0.991" +version = "1.11.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, + {file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"}, + {file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"}, + {file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"}, + {file = "mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"}, + {file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"}, + {file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"}, + {file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"}, + {file = "mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"}, + {file = "mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"}, + {file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"}, + {file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"}, + {file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"}, + {file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"}, + {file = "mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"}, + {file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"}, + {file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"}, + {file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"}, + {file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"}, + {file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"}, + {file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"}, + {file = "mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"}, + {file = "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"}, + {file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"}, + {file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"}, + {file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"}, + {file = 
"mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"}, + {file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -3043,13 +3040,13 @@ files = [ [[package]] name = "notebook" -version = "7.2.1" +version = "7.2.2" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.2.1-py3-none-any.whl", hash = "sha256:f45489a3995746f2195a137e0773e2130960b51c9ac3ce257dbc2705aab3a6ca"}, - {file = "notebook-7.2.1.tar.gz", hash = "sha256:4287b6da59740b32173d01d641f763d292f49c30e7a51b89c46ba8473126341e"}, + {file = "notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c"}, + {file = "notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e"}, ] [package.dependencies] @@ -3120,13 +3117,13 @@ files = [ [[package]] name = "openai" -version = "1.42.0" +version = "1.43.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.42.0-py3-none-any.whl", hash = "sha256:dc91e0307033a4f94931e5d03cc3b29b9717014ad5e73f9f2051b6cb5eda4d80"}, - {file = "openai-1.42.0.tar.gz", hash = "sha256:c9d31853b4e0bc2dc8bd08003b462a006035655a701471695d0bfdc08529cde3"}, + {file = "openai-1.43.0-py3-none-any.whl", hash = "sha256:1a748c2728edd3a738a72a0212ba866f4fdbe39c9ae03813508b267d45104abe"}, + {file = "openai-1.43.0.tar.gz", hash = "sha256:e607aff9fc3e28eade107e5edd8ca95a910a4b12589336d3cbb6bfe2ac306b3c"}, ] [package.dependencies] @@ -3201,7 +3198,7 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" @@ -4174,48 +4171,6 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} -[[package]] -name = "qtconsole" -version = "5.5.2" -description = "Jupyter Qt console" -optional = false -python-versions = ">=3.8" -files = [ - {file = "qtconsole-5.5.2-py3-none-any.whl", hash = "sha256:42d745f3d05d36240244a04e1e1ec2a86d5d9b6edb16dbdef582ccb629e87e0b"}, - {file = "qtconsole-5.5.2.tar.gz", hash = "sha256:6b5fb11274b297463706af84dcbbd5c92273b1f619e6d25d08874b0a88516989"}, -] - -[package.dependencies] -ipykernel = ">=4.1" -jupyter-client = ">=4.1" -jupyter-core = "*" -packaging = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.4.0" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = ["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.4.1" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, - {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] - [[package]] name = "rake-nltk" version = "1.0.6" @@ -4552,19 +4507,23 @@ win32 = ["pywin32"] [[package]] name = "setuptools" -version = "73.0.1" +version = "74.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, + {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -4884,13 +4843,17 @@ files = [ {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, @@ -4907,7 +4870,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") or extra == \"asyncio\""} +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} typing-extensions = ">=4.6.0" [package.extras] @@ -5473,13 +5436,13 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "1.26.19" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, - {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -5687,101 +5650,103 @@ files = [ [[package]] name = "yarl" -version = "1.9.4" +version = "1.9.7" description = "Yet another URL library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file 
= "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = 
"yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.9.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:60c04415b31a1611ef5989a6084dd6f6b95652c6a18378b58985667b65b2ecb6"}, + {file = "yarl-1.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1787dcfdbe730207acb454548a6e19f80ae75e6d2d1f531c5a777bc1ab6f7952"}, + {file = "yarl-1.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5ddad20363f9f1bbedc95789c897da62f939e6bc855793c3060ef8b9f9407bf"}, + {file = "yarl-1.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdb156a06208fc9645ae7cc0fca45c40dd40d7a8c4db626e542525489ca81a9"}, + {file = "yarl-1.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:522fa3d300d898402ae4e0fa7c2c21311248ca43827dc362a667de87fdb4f1be"}, + {file = "yarl-1.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7f9cabfb8b980791b97a3ae3eab2e38b2ba5eab1af9b7495bdc44e1ce7c89e3"}, + {file = "yarl-1.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fc728857df4087da6544fc68f62d7017fa68d74201d5b878e18ed4822c31fb3"}, + {file = "yarl-1.9.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dba2ebac677184d56374fa3e452b461f5d6a03aa132745e648ae8859361eb6b"}, + {file = "yarl-1.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a95167ae34667c5cc7d9206c024f793e8ffbadfb307d5c059de470345de58a21"}, + {file = "yarl-1.9.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9d319ac113ca47352319cbea92d1925a37cb7bd61a8c2f3e3cd2e96eb33cccae"}, + {file = "yarl-1.9.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2d71a5d818d82586ac46265ae01466e0bda0638760f18b21f1174e0dd58a9d2f"}, + {file = "yarl-1.9.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ff03f1c1ac474c66d474929ae7e4dd195592c1c7cc8c36418528ed81b1ca0a79"}, + {file = "yarl-1.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78250f635f221dde97d02c57aade3313310469bc291888dfe32acd1012594441"}, + {file = "yarl-1.9.7-cp310-cp310-win32.whl", hash = "sha256:f3aaf9fa960d55bd7876d55d7ea3cc046f3660df1ff73fc1b8c520a741ed1f21"}, + {file = "yarl-1.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:e8362c941e07fbcde851597672a5e41b21dc292b7d5a1dc439b7a93c9a1af5d9"}, + {file = "yarl-1.9.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:596069ddeaf72b5eb36cd714dcd2b5751d0090d05a8d65113b582ed9e1c801fb"}, + {file = "yarl-1.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cb870907e8b86b2f32541403da9455afc1e535ce483e579bea0e6e79a0cc751c"}, + {file = "yarl-1.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca5e86be84492fa403c4dcd4dcaf8e1b1c4ffc747b5176f7c3d09878c45719b0"}, + {file = "yarl-1.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99cecfb51c84d00132db909e83ae388793ca86e48df7ae57f1be0beab0dcce5"}, + {file = "yarl-1.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25508739e9b44d251172145f54c084b71747b09e4d237dc2abb045f46c36a66e"}, + {file = "yarl-1.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:60f3b5aec3146b6992640592856414870f5b20eb688c1f1d5f7ac010a7f86561"}, + {file = 
"yarl-1.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1557456afce5db3d655b5f8a31cdcaae1f47e57958760525c44b76e812b4987"}, + {file = "yarl-1.9.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71bb1435a84688ed831220c5305d96161beb65cac4a966374475348aa3de4575"}, + {file = "yarl-1.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f87d8645a7a806ec8f66aac5e3b1dcb5014849ff53ffe2a1f0b86ca813f534c7"}, + {file = "yarl-1.9.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:58e3f01673873b8573da3abe138debc63e4e68541b2104a55df4c10c129513a4"}, + {file = "yarl-1.9.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8af0bbd4d84f8abdd9b11be9488e32c76b1501889b73c9e2292a15fb925b378b"}, + {file = "yarl-1.9.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7fc441408ed0d9c6d2d627a02e281c21f5de43eb5209c16636a17fc704f7d0f8"}, + {file = "yarl-1.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a9552367dc440870556da47bb289a806f08ad06fbc4054072d193d9e5dd619ba"}, + {file = "yarl-1.9.7-cp311-cp311-win32.whl", hash = "sha256:628619008680a11d07243391271b46f07f13b75deb9fe92ef342305058c70722"}, + {file = "yarl-1.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:bc23d870864971c8455cfba17498ccefa53a5719ea9f5fce5e7e9c1606b5755f"}, + {file = "yarl-1.9.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d8cf3d0b67996edc11957aece3fbce4c224d0451c7c3d6154ec3a35d0e55f6b"}, + {file = "yarl-1.9.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a7748cd66fef49c877e59503e0cc76179caf1158d1080228e67e1db14554f08"}, + {file = "yarl-1.9.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a6fa3aeca8efabb0fbbb3b15e0956b0cb77f7d9db67c107503c30af07cd9e00"}, + {file = "yarl-1.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf37dd0008e5ac5c3880198976063c491b6a15b288d150d12833248cf2003acb"}, + {file = "yarl-1.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87aa5308482f248f8c3bd9311cd6c7dfd98ea1a8e57e35fb11e4adcac3066003"}, + {file = "yarl-1.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:867b13c1b361f9ba5d2f84dc5408082f5d744c83f66de45edc2b96793a9c5e48"}, + {file = "yarl-1.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ce93947554c2c85fe97fc4866646ec90840bc1162e4db349b37d692a811755"}, + {file = "yarl-1.9.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcd3d94b848cba132f39a5b40d80b0847d001a91a6f35a2204505cdd46afe1b2"}, + {file = "yarl-1.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d06d6a8f98dd87646d98f0c468be14b201e47ec6092ad569adf835810ad0dffb"}, + {file = "yarl-1.9.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:91567ff4fce73d2e7ac67ed5983ad26ba2343bc28cb22e1e1184a9677df98d7c"}, + {file = "yarl-1.9.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1d5594512541e63188fea640b7f066c218d2176203d6e6f82abf702ae3dca3b2"}, + {file = "yarl-1.9.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c2743e43183e4afbb07d5605693299b8756baff0b086c25236c761feb0e3c56"}, + {file = "yarl-1.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:daa69a3a2204355af39f4cfe7f3870d87c53d77a597b5100b97e3faa9460428b"}, + {file = "yarl-1.9.7-cp312-cp312-win32.whl", hash = "sha256:36b16884336c15adf79a4bf1d592e0c1ffdb036a760e36a1361565b66785ec6c"}, + {file = "yarl-1.9.7-cp312-cp312-win_amd64.whl", hash = 
"sha256:2ead2f87a1174963cc406d18ac93d731fbb190633d3995fa052d10cefae69ed8"}, + {file = "yarl-1.9.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:808eddabcb6f7b2cdb6929b3e021ac824a2c07dc7bc83f7618e18438b1b65781"}, + {file = "yarl-1.9.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:395ab0d8ce6d104a988da429bcbfd445e03fb4c911148dfd523f69d13f772e47"}, + {file = "yarl-1.9.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:49827dfccbd59c4499605c13805e947349295466e490860a855b7c7e82ec9c75"}, + {file = "yarl-1.9.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b8bbdd425d0978311520ea99fb6c0e9e04e64aee84fac05f3157ace9f81b05"}, + {file = "yarl-1.9.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71d33fd1c219b5b28ee98cd76da0c9398a4ed4792fd75c94135237db05ba5ca8"}, + {file = "yarl-1.9.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62440431741d0b7d410e5cbad800885e3289048140a43390ecab4f0b96dde3bb"}, + {file = "yarl-1.9.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db97210433366dfba55590e48285b89ad0146c52bf248dd0da492dd9f0f72cf"}, + {file = "yarl-1.9.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:653597b615809f2e5f4dba6cd805608b6fd3597128361a22cc612cf7c7a4d1bf"}, + {file = "yarl-1.9.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:df47612129e66f7ce7c9994d4cd4e6852f6e3bf97699375d86991481796eeec8"}, + {file = "yarl-1.9.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5e338b6febbae6c9fe86924bac3ea9c1944e33255c249543cd82a4af6df6047b"}, + {file = "yarl-1.9.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e649d37d04665dddb90994bbf0034331b6c14144cc6f3fbce400dc5f28dc05b7"}, + {file = "yarl-1.9.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0a1b8fd849567be56342e988e72c9d28bd3c77b9296c38b9b42d2fe4813c9d3f"}, + {file = "yarl-1.9.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f9d715b2175dff9a49c6dafdc2ab3f04850ba2f3d4a77f69a5a1786b057a9d45"}, + {file = "yarl-1.9.7-cp313-cp313-win32.whl", hash = "sha256:bc9233638b07c2e4a3a14bef70f53983389bffa9e8cb90a2da3f67ac9c5e1842"}, + {file = "yarl-1.9.7-cp313-cp313-win_amd64.whl", hash = "sha256:62e110772330d7116f91e79cd83fef92545cb2f36414c95881477aa01971f75f"}, + {file = "yarl-1.9.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a564155cc2194ecd9c0d8f8dc57059b822a507de5f08120063675eb9540576aa"}, + {file = "yarl-1.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03e917cc44a01e1be60a83ee1a17550b929490aaa5df2a109adc02137bddf06b"}, + {file = "yarl-1.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eefda67ba0ba44ab781e34843c266a76f718772b348f7c5d798d8ea55b95517f"}, + {file = "yarl-1.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:316c82b499b6df41444db5dea26ee23ece9356e38cea43a8b2af9e6d8a3558e4"}, + {file = "yarl-1.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10452727843bc847596b75e30a7fe92d91829f60747301d1bd60363366776b0b"}, + {file = "yarl-1.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:050f3e4d886be55728fef268587d061c5ce6f79a82baba71840801b63441c301"}, + {file = "yarl-1.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0aabe557446aa615693a82b4d3803c102fd0e7a6a503bf93d744d182a510184"}, + {file = "yarl-1.9.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:23404842228e6fa8ace235024519df37f3f8e173620407644d40ddca571ff0f4"}, + {file = "yarl-1.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:34736fcc9d6d7080ebbeb0998ecb91e4f14ad8f18648cf0b3099e2420a225d86"}, + {file = "yarl-1.9.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:48f7a158f3ca67509d21cb02a96964e4798b6f133691cc0c86cf36e26e26ec8f"}, + {file = "yarl-1.9.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:6639444d161c693cdabb073baaed1945c717d3982ecedf23a219bc55a242e728"}, + {file = "yarl-1.9.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:1cd450e10cb53d63962757c3f6f7870be49a3e448c46621d6bd46f8088d532de"}, + {file = "yarl-1.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74d3ef5e81f81507cea04bf5ae22f18ef538607a7c754aac2b6e3029956a2842"}, + {file = "yarl-1.9.7-cp38-cp38-win32.whl", hash = "sha256:4052dbd0c900bece330e3071c636f99dff06e4628461a29b38c6e222a427cf98"}, + {file = "yarl-1.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:dd08da4f2d171e19bd02083c921f1bef89f8f5f87000d0ffc49aa257bc5a9802"}, + {file = "yarl-1.9.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ab906a956d2109c6ea11e24c66592b06336e2743509290117f0f7f47d2c1dd3"}, + {file = "yarl-1.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d8ad761493d5aaa7ab2a09736e62b8a220cb0b10ff8ccf6968c861cd8718b915"}, + {file = "yarl-1.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d35f9cdab0ec5e20cf6d2bd46456cf599052cf49a1698ef06b9592238d1cf1b1"}, + {file = "yarl-1.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a48d2b9f0ae29a456fb766ae461691378ecc6cf159dd9f938507d925607591c3"}, + {file = "yarl-1.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf85599c9336b89b92c313519bcaa223d92fa5d98feb4935a47cce2e8722b4b8"}, + {file = "yarl-1.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e8916b1ff7680b1f2b1608c82dc15c569b9f2cb2da100c747c291f1acf18a14"}, + {file = "yarl-1.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29c80890e0a64fb0e5f71350d48da330995073881f8b8e623154aef631febfb0"}, + {file = "yarl-1.9.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9163d21aa40ff8528db2aee2b0b6752efe098055b41ab8e5422b2098457199fe"}, + {file = "yarl-1.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:65e3098969baf221bb45e3b2f60735fc2b154fc95902131ebc604bae4c629ea6"}, + {file = "yarl-1.9.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cddebd096effe4be90fd378e4224cd575ac99e1c521598a6900e94959006e02e"}, + {file = "yarl-1.9.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8525f955a2dcc281573b6aadeb8ab9c37e2d3428b64ca6a2feec2a794a69c1da"}, + {file = "yarl-1.9.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:5d585c7d834c13f24c7e3e0efaf1a4b7678866940802e11bd6c4d1f99c935e6b"}, + {file = "yarl-1.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78805148e780a9ca66f3123e04741e344b66cf06b4fb13223e3a209f39a6da55"}, + {file = "yarl-1.9.7-cp39-cp39-win32.whl", hash = "sha256:3f53df493ec80b76969d6e1ae6e4411a55ab1360e02b80c84bd4b33d61a567ba"}, + {file = "yarl-1.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:c81c28221a85add23a0922a6aeb2cdda7f9723e03e2dfae06fee5c57fe684262"}, + {file = "yarl-1.9.7-py3-none-any.whl", hash = "sha256:49935cc51d272264358962d050d726c3e5603a616f53e52ea88e9df1728aa2ee"}, + {file = "yarl-1.9.7.tar.gz", hash = "sha256:f28e602edeeec01fc96daf7728e8052bc2e12a672e2a138561a1ebaf30fd9df7"}, ] [package.dependencies] @@ 
-5790,20 +5755,24 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "eb6df85ec43515589690f26e8bf203c461822419637b16041f32e9183f09edb5" +content-hash = "c8f7cb4bf41616a6010eaf8acb80fc7b339cf8a3e2b12a7da15880b83a05e6ff" diff --git a/llama-index-core/pyproject.toml b/llama-index-core/pyproject.toml index b41575c42e182..629bf2b30697a 100644 --- a/llama-index-core/pyproject.toml +++ b/llama-index-core/pyproject.toml @@ -14,7 +14,10 @@ skip = "./llama_index/core/_static,./examples,./experimental,*.csv,*.html,*.json disallow_untyped_defs = true # Remove venv skip when integrated with pre-commit exclude = ["_static", "build", "examples", "notebooks", "venv"] +explicit_package_bases = true ignore_missing_imports = true +namespace_packages = true +plugins = "pydantic.mypy" python_version = "3.8" [tool.poetry] @@ -43,7 +46,7 @@ name = "llama-index-core" packages = [{include = "llama_index"}] readme = "README.md" repository = "https://github.com/run-llama/llama_index" -version = "0.11.1" +version = "0.11.9" [tool.poetry.dependencies] SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"} @@ -67,7 +70,7 @@ tqdm = "^4.66.1" pillow = ">=9.0.0" PyYAML = ">=6.0.1" wrapt = "*" -pydantic = ">=2.0.0,<3.0.0" +pydantic = ">=2.7.0,<3.0.0" [tool.poetry.group.dev.dependencies] black = {extras = ["jupyter"], version = ">=23.7.0,<=24.3.0"} @@ -79,7 +82,7 @@ ipython = "8.10.0" jupyter = "^1.0.0" llama-cloud = ">=0.0.6" motor = "^3.3.2" -mypy = "0.991" +mypy = "1.11.0" openai = "*" pandas = "*" pre-commit = "3.2.0" diff --git a/llama-index-core/tests/agent/react/test_react_output_parser.py b/llama-index-core/tests/agent/react/test_react_output_parser.py index 1e7525fb02226..4f587654390f8 100644 --- a/llama-index-core/tests/agent/react/test_react_output_parser.py +++ b/llama-index-core/tests/agent/react/test_react_output_parser.py @@ -130,6 +130,20 @@ def test_extract_tool_use_spurious_newlines() -> None: assert action_input == '{"a": 1, "b": 1}' +def test_extract_tool_use_with_Chinese_characters() -> None: + mock_input_text = """\ +Thought: I need to use a tool to help me answer the 
question. + +Action: 加法 + +Action Input: {"a": 1, "b": 1} +""" + thought, action, action_input = extract_tool_use(mock_input_text) + assert thought == "I need to use a tool to help me answer the question." + assert action == "加法" + assert action_input == '{"a": 1, "b": 1}' + + def test_extract_final_response() -> None: mock_input_text = """\ Thought: I have enough information to answer the question without using any more tools. diff --git a/llama-index-core/tests/memory/test_chat_summary_memory_buffer.py b/llama-index-core/tests/memory/test_chat_summary_memory_buffer.py index 833bad19e85d5..dbcbbc62f3c11 100644 --- a/llama-index-core/tests/memory/test_chat_summary_memory_buffer.py +++ b/llama-index-core/tests/memory/test_chat_summary_memory_buffer.py @@ -251,7 +251,7 @@ def test_get_when_initial_tokens_exceed_limit_raises_value_error() -> None: # When I get the chat history from the memory with pytest.raises(ValueError) as error: - memory.get(initial_tokens) + memory.get(initial_token_count=initial_tokens) # Then a value error should be raised assert str(error.value) == "Initial token count exceeds token limit" diff --git a/llama-index-core/tests/tools/test_types.py b/llama-index-core/tests/tools/test_types.py index 5dd8a99bf68e6..f38831b5a092a 100644 --- a/llama-index-core/tests/tools/test_types.py +++ b/llama-index-core/tests/tools/test_types.py @@ -1,7 +1,18 @@ import pytest + +from llama_index.core.bridge.pydantic import BaseModel +from llama_index.core.program.function_program import _get_function_tool from llama_index.core.tools.types import ToolMetadata +class Inner(BaseModel): + name: str + + +class Outer(BaseModel): + inner: Inner + + def test_toolmetadata_openai_tool_description_max_length() -> None: openai_tool_description_limit = 1024 valid_description = "a" * openai_tool_description_limit @@ -12,3 +23,18 @@ def test_toolmetadata_openai_tool_description_max_length() -> None: with pytest.raises(ValueError): ToolMetadata(invalid_description).to_openai_tool() + + +def test_nested_tool_schema() -> None: + tool = _get_function_tool(Outer) + schema = tool.metadata.get_parameters_dict() + + assert "$defs" in schema + defs = schema["$defs"] + assert "Inner" in defs + inner = defs["Inner"] + assert inner["required"][0] == "name" + assert inner["properties"] == {"name": {"title": "Name", "type": "string"}} + + assert schema["required"][0] == "inner" + assert schema["properties"] == {"inner": {"$ref": "#/$defs/Inner"}} diff --git a/llama-index-core/tests/vector_stores/test_simple.py b/llama-index-core/tests/vector_stores/test_simple.py index 119e740231aee..ba62fb7f0a646 100644 --- a/llama-index-core/tests/vector_stores/test_simple.py +++ b/llama-index-core/tests/vector_stores/test_simple.py @@ -1,7 +1,16 @@ import unittest +from pathlib import Path from typing import List -from llama_index.core.schema import NodeRelationship, RelatedNodeInfo, TextNode +import pytest + +from llama_index.core import VectorStoreIndex, MockEmbedding +from llama_index.core.schema import ( + NodeRelationship, + RelatedNodeInfo, + TextNode, + Document, +) from llama_index.core.vector_stores import SimpleVectorStore from llama_index.core.vector_stores.types import ( ExactMatchFilter, @@ -17,6 +26,15 @@ _NODE_ID_WEIGHT_3_RANK_C = "452D24AB-F185-414C-A352-590B4B9EE51B" +@pytest.fixture() +def persist_dir(tmp_path: Path): + index = VectorStoreIndex.from_documents( + [Document(id_="1", text="1")], embed_model=MockEmbedding(embed_dim=1) + ) + index.storage_context.persist(str(tmp_path)) + return str(tmp_path) + + def 
_node_embeddings_for_test() -> List[TextNode]: return [ TextNode( @@ -434,3 +452,15 @@ def test_delete_nodes(self) -> None: query = VectorStoreQuery(query_embedding=[1.0, 1.0], similarity_top_k=3) result = simple_vector_store.query(query) self.assertEqual(result.ids, [_NODE_ID_WEIGHT_3_RANK_C]) + + +def test_from_persist_dir(persist_dir: str) -> None: + vector_store = SimpleVectorStore.from_persist_dir(persist_dir=persist_dir) + assert vector_store is not None + + +def test_from_namespaced_persist_dir(persist_dir: str) -> None: + vector_store = SimpleVectorStore.from_namespaced_persist_dir( + persist_dir=persist_dir + ) + assert vector_store is not None diff --git a/llama-index-core/tests/workflow/conftest.py b/llama-index-core/tests/workflow/conftest.py index b7f48d05487f2..e9d6278b2d982 100644 --- a/llama-index-core/tests/workflow/conftest.py +++ b/llama-index-core/tests/workflow/conftest.py @@ -2,7 +2,7 @@ from llama_index.core.workflow.decorators import step from llama_index.core.workflow.events import StartEvent, StopEvent, Event -from llama_index.core.workflow.session import WorkflowSession +from llama_index.core.workflow.context import Context from llama_index.core.workflow.workflow import Workflow from llama_index.core.bridge.pydantic import Field @@ -44,5 +44,5 @@ def events(): @pytest.fixture() -def session(): - return WorkflowSession(workflow=Workflow()) +def ctx(): + return Context(workflow=Workflow()) diff --git a/llama-index-core/tests/workflow/test_context.py b/llama-index-core/tests/workflow/test_context.py index 27178d146f522..fd4b3c6613114 100644 --- a/llama-index-core/tests/workflow/test_context.py +++ b/llama-index-core/tests/workflow/test_context.py @@ -1,22 +1,19 @@ -import pytest +from unittest import mock from typing import Union, Optional +import pytest from llama_index.core.workflow.workflow import ( Workflow, Context, ) from llama_index.core.workflow.decorators import step -from llama_index.core.workflow.events import StartEvent, StopEvent +from llama_index.core.workflow.errors import WorkflowRuntimeError +from llama_index.core.workflow.events import StartEvent, StopEvent, Event from llama_index.core.workflow.workflow import Workflow from .conftest import OneTestEvent, AnotherTestEvent -def test_context_ctor(): - with pytest.raises(ValueError): - ctx = Context() - - @pytest.mark.asyncio() async def test_collect_events(): ev1 = OneTestEvent() @@ -46,43 +43,80 @@ async def step3( @pytest.mark.asyncio() -async def test_set_global(session): - c1 = Context(session=session) - await c1.set(key="test_key", value=42) - - c2 = Context(parent=c1) - assert await c2.get(key="test_key") == 42 +async def test_get_default(workflow): + c1 = Context(workflow) + assert await c1.get(key="test_key", default=42) == 42 @pytest.mark.asyncio() -async def test_set_private(session): - c1 = Context(session=session) - await c1.set(key="test_key", value=42, make_private=True) - assert await c1.get(key="test_key") == 42 +async def test_get(ctx): + await ctx.set("foo", 42) + assert await ctx.get("foo") == 42 + - c2 = Context(parent=c1) +@pytest.mark.asyncio() +async def test_get_not_found(ctx): with pytest.raises(ValueError): - await c2.get(key="test_key") + await ctx.get("foo") @pytest.mark.asyncio() -async def test_set_private_duplicate(session): - c1 = Context(session=session) +async def test_legacy_data(workflow): + c1 = Context(workflow) await c1.set(key="test_key", value=42) + assert c1.data["test_key"] == 42 - c2 = Context(parent=c1) - with pytest.raises(ValueError): - await 
c2.set(key="test_key", value=99, make_private=True) +def test_send_event_step_is_none(ctx): + ctx._queues = {"step1": mock.MagicMock(), "step2": mock.MagicMock()} + ev = Event(foo="bar") + ctx.send_event(ev) + for q in ctx._queues.values(): + q.put_nowait.assert_called_with(ev) + assert ctx._broker_log == [ev] -@pytest.mark.asyncio() -async def test_get_default(session): - c1 = Context(session=session) - assert await c1.get(key="test_key", default=42) == 42 + +def test_send_event_to_non_existent_step(ctx): + with pytest.raises( + WorkflowRuntimeError, match="Step does_not_exist does not exist" + ): + ctx.send_event(Event(), "does_not_exist") + + +def test_send_event_to_wrong_step(ctx): + ctx._workflow._get_steps = mock.MagicMock(return_value={"step": mock.MagicMock()}) + + with pytest.raises( + WorkflowRuntimeError, + match="Step step does not accept event of type ", + ): + ctx.send_event(Event(), "step") + + +def test_send_event_to_step(ctx): + step2 = mock.MagicMock() + step2.__step_config.accepted_events = [Event] + + ctx._workflow._get_steps = mock.MagicMock( + return_value={"step1": mock.MagicMock(), "step2": step2} + ) + ctx._queues = {"step1": mock.MagicMock(), "step2": mock.MagicMock()} + + ev = Event(foo="bar") + ctx.send_event(ev, "step2") + + ctx._queues["step1"].put_nowait.assert_not_called() + ctx._queues["step2"].put_nowait.assert_called_with(ev) + + +def test_get_result(ctx): + ctx._retval = 42 + assert ctx.get_result() == 42 @pytest.mark.asyncio() -async def test_legacy_data(session): - c1 = Context(session=session) - await c1.set(key="test_key", value=42) - assert c1.data["test_key"] == 42 +async def test_deprecated_params(ctx): + with pytest.warns( + DeprecationWarning, match="`make_private` is deprecated and will be ignored" + ): + await ctx.set("foo", 42, make_private=True) diff --git a/llama-index-core/tests/workflow/test_event.py b/llama-index-core/tests/workflow/test_event.py index bc3a152320afc..49973fdfef065 100644 --- a/llama-index-core/tests/workflow/test_event.py +++ b/llama-index-core/tests/workflow/test_event.py @@ -1,3 +1,5 @@ +import pytest + from llama_index.core.workflow.events import Event from llama_index.core.bridge.pydantic import PrivateAttr from typing import Any @@ -38,7 +40,7 @@ def test_event_init_basic(): def test_custom_event_with_fields_and_private_params(): - evt = _TestEvent(a=1, param="test_param", _private_param_1="test_private_param_1") + evt = _TestEvent(a=1, param="test_param", _private_param_1="test_private_param_1") # type: ignore assert evt.a == 1 assert evt["a"] == evt.a @@ -56,3 +58,32 @@ def test_custom_event_override_init(): assert evt._data == {"a": 1, "b": 2} assert evt._private_param == 2 assert evt._modified_private_param == 4 + + +def test_event_missing_key(): + ev = _TestEvent(param="bar") + with pytest.raises(AttributeError): + ev.wrong_key + + +def test_event_not_a_field(): + ev = _TestEvent(param="foo", not_a_field="bar") # type: ignore + assert ev._data["not_a_field"] == "bar" + ev.not_a_field = "baz" + assert ev._data["not_a_field"] == "baz" + ev["not_a_field"] = "barbaz" + assert ev._data["not_a_field"] == "barbaz" + assert ev.get("not_a_field") == "barbaz" + + +def test_event_dict_api(): + ev = _TestEvent(param="foo") + assert len(ev) == 0 + ev["a_new_key"] = "bar" + assert len(ev) == 1 + assert list(ev.values()) == ["bar"] + k, v = next(iter(ev.items())) + assert k == "a_new_key" + assert v == "bar" + assert next(iter(ev)) == "a_new_key" + assert ev.dict() == {"a_new_key": "bar"} diff --git 
a/llama-index-core/tests/workflow/test_retry_policy.py b/llama-index-core/tests/workflow/test_retry_policy.py new file mode 100644 index 0000000000000..30bafc47c009c --- /dev/null +++ b/llama-index-core/tests/workflow/test_retry_policy.py @@ -0,0 +1,46 @@ +import pytest + +from llama_index.core.workflow.context import Context +from llama_index.core.workflow.decorators import step +from llama_index.core.workflow.events import Event, StartEvent, StopEvent +from llama_index.core.workflow.retry_policy import ConstantDelayRetryPolicy +from llama_index.core.workflow.workflow import Workflow + + +@pytest.mark.asyncio() +async def test_retry_e2e(): + class CountEvent(Event): + """Empty event to signal a step to increment a counter in the Context.""" + + class DummyWorkflow(Workflow): + # Set a small delay to avoid impacting the CI speed too much + @step(retry_policy=ConstantDelayRetryPolicy(delay=0.2)) + async def flaky_step(self, ctx: Context, ev: StartEvent) -> StopEvent: + count = await ctx.get("counter", default=0) + ctx.send_event(CountEvent()) + if count < 3: + raise ValueError("Something bad happened!") + return StopEvent(result="All good!") + + @step + async def counter(self, ctx: Context, ev: CountEvent) -> None: + count = await ctx.get("counter", default=0) + await ctx.set("counter", count + 1) + + workflow = DummyWorkflow(disable_validation=True) + assert await workflow.run() == "All good!" + + +def test_ConstantDelayRetryPolicy_init(): + p = ConstantDelayRetryPolicy() + assert p.maximum_attempts == 3 + assert p.delay == 5 + + +def test_ConstantDelayRetryPolicy_next(): + delay = 4.2 + p = ConstantDelayRetryPolicy(maximum_attempts=5, delay=delay) + assert p.next(elapsed_time=0.0, attempts=4, error=Exception()) == delay + assert p.next(elapsed_time=0.0, attempts=5, error=Exception()) is None + # This should never happen but ensure the code is resilient + assert p.next(elapsed_time=0.0, attempts=999, error=Exception()) is None diff --git a/llama-index-core/tests/workflow/test_session.py b/llama-index-core/tests/workflow/test_session.py deleted file mode 100644 index 0b39ff4537632..0000000000000 --- a/llama-index-core/tests/workflow/test_session.py +++ /dev/null @@ -1,61 +0,0 @@ -from unittest import mock - -import pytest - -from llama_index.core.workflow.errors import WorkflowRuntimeError -from llama_index.core.workflow.events import Event - - -def test_send_event_step_is_none(session): - session._queues = {"step1": mock.MagicMock(), "step2": mock.MagicMock()} - ev = Event(foo="bar") - session.send_event(ev) - for q in session._queues.values(): - q.put_nowait.assert_called_with(ev) - assert session._broker_log == [ev] - - -def test_send_event_to_non_existent_step(session): - with pytest.raises( - WorkflowRuntimeError, match="Step does_not_exist does not exist" - ): - session.send_event(Event(), "does_not_exist") - - -def test_send_event_to_wrong_step(session): - session._workflow._get_steps = mock.MagicMock( - return_value={"step": mock.MagicMock()} - ) - - with pytest.raises( - WorkflowRuntimeError, - match="Step step does not accept event of type ", - ): - session.send_event(Event(), "step") - - -def test_send_event_to_step(session): - step2 = mock.MagicMock() - step2.__step_config.accepted_events = [Event] - - session._workflow._get_steps = mock.MagicMock( - return_value={"step1": mock.MagicMock(), "step2": step2} - ) - session._queues = {"step1": mock.MagicMock(), "step2": mock.MagicMock()} - - ev = Event(foo="bar") - session.send_event(ev, "step2") - - 
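# --- A minimal sketch (editor's addition, not part of the patch): the retry
# contract exercised by the tests above is
# `next(elapsed_time, attempts, error) -> Optional[float]`, where a float is the
# delay before the next attempt and None means "stop retrying". A hypothetical
# exponential-backoff policy with the same shape (only ConstantDelayRetryPolicy
# ships in this patch):
from typing import Optional


class ExponentialBackoffRetryPolicy:
    def __init__(self, maximum_attempts: int = 3, base_delay: float = 1.0) -> None:
        self.maximum_attempts = maximum_attempts
        self.base_delay = base_delay

    def next(
        self, elapsed_time: float, attempts: int, error: Exception
    ) -> Optional[float]:
        if attempts >= self.maximum_attempts:
            # Mirrors ConstantDelayRetryPolicy: give up once attempts are exhausted.
            return None
        return self.base_delay * (2**attempts)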
session._queues["step1"].put_nowait.assert_not_called() - session._queues["step2"].put_nowait.assert_called_with(ev) - - -def test_get_result(session): - session._retval = 42 - assert session.get_result() == 42 - - -def test_get_context(session): - ctx = session.get_context("step") - assert session._step_to_context["step"] == ctx - assert ctx._parent == session._root_context diff --git a/llama-index-core/tests/workflow/test_streaming.py b/llama-index-core/tests/workflow/test_streaming.py index 7a3a00bb1dffc..9a31a2516b86d 100644 --- a/llama-index-core/tests/workflow/test_streaming.py +++ b/llama-index-core/tests/workflow/test_streaming.py @@ -8,6 +8,8 @@ from llama_index.core.workflow.workflow import Workflow from llama_index.core.workflow.errors import WorkflowRuntimeError +from .conftest import OneTestEvent + class StreamingWorkflow(Workflow): @step @@ -40,7 +42,45 @@ async def test_too_many_runs(): r = asyncio.gather(wf.run(), wf.run()) with pytest.raises( WorkflowRuntimeError, - match="This workflow has multiple session running concurrently", + match="This workflow has multiple concurrent runs in progress and cannot stream events", ): async for ev in wf.stream_events(): pass + await r + + +@pytest.mark.asyncio() +async def test_task_raised(): + class DummyWorkflow(Workflow): + @step + async def step(self, ctx: Context, ev: StartEvent) -> StopEvent: + ctx.write_event_to_stream(OneTestEvent(test_param="foo")) + raise ValueError("The step raised an error!") + + wf = DummyWorkflow() + r = asyncio.create_task(wf.run()) + + # Make sure we don't block indefinitely here because the step raised + async for ev in wf.stream_events(): + assert ev.test_param == "foo" + + # Make sure the await actually caught the exception + with pytest.raises(ValueError, match="The step raised an error!"): + await r + + +@pytest.mark.asyncio() +async def test_multiple_streams(): + wf = StreamingWorkflow() + r = asyncio.create_task(wf.run()) + + # stream 1 + async for _ in wf.stream_events(): + pass + await r + + # stream 2 -- should not raise an error + r = asyncio.create_task(wf.run()) + async for _ in wf.stream_events(): + pass + await r diff --git a/llama-index-core/tests/workflow/test_workflow.py b/llama-index-core/tests/workflow/test_workflow.py index 00495e73e6977..d96006fc68d14 100644 --- a/llama-index-core/tests/workflow/test_workflow.py +++ b/llama-index-core/tests/workflow/test_workflow.py @@ -69,14 +69,55 @@ async def slow_step(self, ev: StartEvent) -> StopEvent: @pytest.mark.asyncio() -async def test_workflow_validation(): +async def test_workflow_validation_unproduced_events(): class InvalidWorkflow(Workflow): @step async def invalid_step(self, ev: StartEvent) -> None: pass workflow = InvalidWorkflow() - with pytest.raises(WorkflowValidationError): + with pytest.raises( + WorkflowValidationError, + match="The following events are consumed but never produced: StopEvent", + ): + await workflow.run() + + +@pytest.mark.asyncio() +async def test_workflow_validation_unconsumed_events(): + class InvalidWorkflow(Workflow): + @step + async def invalid_step(self, ev: StartEvent) -> OneTestEvent: + return OneTestEvent() + + @step + async def a_step(self, ev: StartEvent) -> StopEvent: + return StopEvent() + + workflow = InvalidWorkflow() + with pytest.raises( + WorkflowValidationError, + match="The following events are produced but never consumed: OneTestEvent", + ): + await workflow.run() + + +@pytest.mark.asyncio() +async def test_workflow_validation_start_event_not_consumed(): + class InvalidWorkflow(Workflow): 
+ @step + async def a_step(self, ev: OneTestEvent) -> StopEvent: + return StopEvent() + + @step + async def another_step(self, ev: OneTestEvent) -> OneTestEvent: + return OneTestEvent() + + workflow = InvalidWorkflow() + with pytest.raises( + WorkflowValidationError, + match="The following events are produced but never consumed: StartEvent", + ): await workflow.run() @@ -101,7 +142,7 @@ async def step2(self, ev: OneTestEvent) -> StopEvent: @pytest.mark.asyncio() -async def test_sync_async_steps(): +async def test_workflow_sync_async_steps(): class SyncAsyncWorkflow(Workflow): @step async def async_step(self, ev: StartEvent) -> OneTestEvent: @@ -143,7 +184,7 @@ async def final_step( ev, [AnotherTestEvent] * ctx.data["num_to_collect"] ) if events is None: - return None + return None # type: ignore return StopEvent(result=[ev.another_test_param for ev in events]) workflow = NumWorkersWorkflow() @@ -168,7 +209,7 @@ class StepSendEventWorkflow(Workflow): @step async def step1(self, ctx: Context, ev: StartEvent) -> OneTestEvent: ctx.session.send_event(OneTestEvent(), step="step2") - return None + return None # type: ignore @step async def step2(self, ev: OneTestEvent) -> StopEvent: @@ -182,9 +223,9 @@ async def step3(self, ev: OneTestEvent) -> StopEvent: result = await workflow.run() assert result == "step2" assert workflow.is_done() - session = workflow._sessions.pop() - assert ("step2", "OneTestEvent") in session._accepted_events - assert ("step3", "OneTestEvent") not in session._accepted_events + ctx = workflow._contexts.pop() + assert ("step2", "OneTestEvent") in ctx._accepted_events + assert ("step3", "OneTestEvent") not in ctx._accepted_events @pytest.mark.asyncio() @@ -192,17 +233,40 @@ async def test_workflow_step_send_event_to_None(): class StepSendEventToNoneWorkflow(Workflow): @step async def step1(self, ctx: Context, ev: StartEvent) -> OneTestEvent: - ctx.session.send_event(OneTestEvent(), step=None) - return None + ctx.send_event(OneTestEvent(), step=None) + return # type:ignore @step async def step2(self, ev: OneTestEvent) -> StopEvent: return StopEvent(result="step2") - workflow = StepSendEventToNoneWorkflow() + workflow = StepSendEventToNoneWorkflow(verbose=True) await workflow.run() assert workflow.is_done() - assert ("step2", "OneTestEvent") in workflow._sessions.pop()._accepted_events + assert ("step2", "OneTestEvent") in workflow._contexts.pop()._accepted_events + + +@pytest.mark.asyncio() +async def test_workflow_step_returning_bogus(): + class TestWorkflow(Workflow): + @step + async def step1(self, ctx: Context, ev: StartEvent) -> OneTestEvent: + return "foo" # type:ignore + + @step + async def step2(self, ctx: Context, ev: StartEvent) -> OneTestEvent: + return OneTestEvent() + + @step + async def step3(self, ev: OneTestEvent) -> StopEvent: + return StopEvent(result="step2") + + workflow = TestWorkflow() + with pytest.warns( + UserWarning, + match="Step function step1 returned str instead of an Event instance.", + ): + await workflow.run() @pytest.mark.asyncio() @@ -238,17 +302,80 @@ async def step(self, ev: StartEvent) -> StopEvent: def test_deprecated_send_event(): ev = StartEvent() wf = Workflow() - session1 = mock.MagicMock() + ctx = mock.MagicMock() - # One session, assert step emits a warning - wf._sessions.add(session1) + # One context, assert step emits a warning + wf._contexts.add(ctx) with pytest.warns(UserWarning): wf.send_event(message=ev) - session1.send_event.assert_called_with(message=ev, step=None) + ctx.send_event.assert_called_with(message=ev, step=None) - 
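# --- A minimal sketch (editor's addition, not part of the patch): a workflow
# that passes the validation rules tested above -- every event type a step
# produces is consumed by another step, from StartEvent through to StopEvent.
from llama_index.core.workflow.decorators import step
from llama_index.core.workflow.events import Event, StartEvent, StopEvent
from llama_index.core.workflow.workflow import Workflow


class MidEvent(Event):
    pass


class ValidWorkflow(Workflow):
    @step
    async def produce(self, ev: StartEvent) -> MidEvent:
        return MidEvent()

    @step
    async def consume(self, ev: MidEvent) -> StopEvent:
        return StopEvent(result="done")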
# Second session, assert step raises an exception - session2 = mock.MagicMock() - wf._sessions.add(session2) + # Second context, assert step raises an exception + ctx = mock.MagicMock() + wf._contexts.add(ctx) with pytest.raises(WorkflowRuntimeError): wf.send_event(message=ev) - session2.send_event.assert_not_called() + ctx.send_event.assert_not_called() + + +def test_add_step(): + class TestWorkflow(Workflow): + @step + def foo_step(self, ev: StartEvent) -> None: + pass + + with pytest.raises( + WorkflowValidationError, + match="A step foo_step is already part of this workflow, please choose another name.", + ): + + @step(workflow=TestWorkflow) + def foo_step(ev: StartEvent) -> None: + pass + + +def test_add_step_not_a_step(): + class TestWorkflow(Workflow): + @step + def a_ste(self, ev: StartEvent) -> None: + pass + + def another_step(ev: StartEvent) -> None: + pass + + with pytest.raises( + WorkflowValidationError, + match="Step function another_step is missing the `@step` decorator.", + ): + TestWorkflow.add_step(another_step) + + +@pytest.mark.asyncio() +async def test_workflow_task_raises(): + class DummyWorkflow(Workflow): + @step + async def step(self, ev: StartEvent) -> StopEvent: + raise ValueError("The step raised an error!") + + workflow = DummyWorkflow() + with pytest.raises(ValueError, match="The step raised an error!"): + await workflow.run() + + +@pytest.mark.asyncio() +async def test_workflow_task_raises_step(): + class DummyWorkflow(Workflow): + @step + async def step(self, ev: StartEvent) -> StopEvent: + raise ValueError("The step raised an error!") + + workflow = DummyWorkflow() + with pytest.raises(ValueError, match="The step raised an error!"): + await workflow.run_step() + + +def test_workflow_disable_validation(): + w = Workflow(disable_validation=True) + w._get_steps = mock.MagicMock() + w._validate() + w._get_steps.assert_not_called() diff --git a/llama-index-experimental/llama_index/experimental/__init__.py b/llama-index-experimental/llama_index/experimental/__init__.py index 4186b4873b35d..06307e9ecb49b 100644 --- a/llama-index-experimental/llama_index/experimental/__init__.py +++ b/llama-index-experimental/llama_index/experimental/__init__.py @@ -2,5 +2,6 @@ PandasQueryEngine, ) from llama_index.experimental.param_tuner.base import ParamTuner +from llama_index.experimental.nudge.base import Nudge -__all__ = ["PandasQueryEngine", "ParamTuner"] +__all__ = ["PandasQueryEngine", "ParamTuner", "Nudge"] diff --git a/llama-index-experimental/llama_index/experimental/nudge/BUILD b/llama-index-experimental/llama_index/experimental/nudge/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-experimental/llama_index/experimental/nudge/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-experimental/llama_index/experimental/nudge/__init__.py b/llama-index-experimental/llama_index/experimental/nudge/__init__.py new file mode 100644 index 0000000000000..0fcc12ccec243 --- /dev/null +++ b/llama-index-experimental/llama_index/experimental/nudge/__init__.py @@ -0,0 +1,5 @@ +from llama_index.experimental.nudge.base import ( + Nudge, +) + +__all__ = ["Nudge"] diff --git a/llama-index-experimental/llama_index/experimental/nudge/base.py b/llama-index-experimental/llama_index/experimental/nudge/base.py new file mode 100644 index 0000000000000..ae8aeae88d8c3 --- /dev/null +++ b/llama-index-experimental/llama_index/experimental/nudge/base.py @@ -0,0 +1,197 @@ +import logging +from typing import Any, Optional + +from tqdm import tqdm, 
trange + +from llama_index.core.utils import print_text +from llama_index.core.base.embeddings.base import BaseEmbedding +from llama_index.core.utils import infer_torch_device +from llama_index.finetuning.embeddings.common import EmbeddingQAFinetuneDataset + +logger = logging.getLogger(__name__) + +IMPORT_ERROR_MSG = "PyTorch is not installed. Please install it with 'pip install torch' to use this functionality." + + +def multiclass_nll_loss(output, targets): + return (-1 * output * targets).sum(axis=-1).mean() + + +class Nudge: + """The algorithm implemented here, [NUDGE](https://www.arxiv.org/abs/2409.02343), is the current state of the art for embedding finetuning. + If a validation dataset is provided, the finetuned corpus embeddings are evaluated at the end of every epoch and the best ones are kept based on validation accuracy. + + Args: + train_dataset (EmbeddingQAFinetuneDataset): Dataset to finetune on. + embed_model (BaseEmbedding): Embedding model. + val_dataset (Optional[EmbeddingQAFinetuneDataset]): Validation dataset. Defaults to None. + train_batch_size (int): Train batch size. Defaults to 10. + val_batch_size (int): Validation batch size. Defaults to 10. + epochs (int): Number of epochs. Defaults to 1. + device (Optional[str]): Device to use. Defaults to None. + verbose (bool): Whether to print progress. Defaults to False. + """ + + def __init__( + self, + train_dataset: EmbeddingQAFinetuneDataset, + embed_model: BaseEmbedding, + val_dataset: Optional[EmbeddingQAFinetuneDataset] = None, + train_batch_size: int = 10, + val_batch_size: int = 10, + epochs: int = 1, + device: Optional[str] = None, + verbose: bool = False, + ) -> None: + """Init params.""" + try: + import torch + except ImportError: + raise ImportError(IMPORT_ERROR_MSG) + + self.train_dataset = train_dataset + self.val_dataset = val_dataset + self.embed_model = embed_model + self.corpus_embeddings = self._get_corpus_embeddings(train_dataset) + + # load in data, run embedding model, define data loader + + self.train_batch_size = train_batch_size + self.val_batch_size = val_batch_size + self.train_loader = self._get_data_loader(train_dataset, train_batch_size) + self.val_loader = ( + self._get_data_loader(val_dataset, val_batch_size) + if val_dataset is not None + else None + ) + + if device is None: + device = infer_torch_device() + logger.info(f"Use pytorch device: {device}") + self._target_device = torch.device(device) + + self._epochs = epochs + + self._verbose = verbose + + def _get_data_loader( + self, dataset: EmbeddingQAFinetuneDataset, batch_size: int + ) -> Any: + """Get data loader.""" + try: + import torch + from torch.utils.data import DataLoader + except ImportError: + raise ImportError(IMPORT_ERROR_MSG) + + examples: Any = [] + + for query_id, query in dataset.queries.items(): + query_embedding = torch.tensor(self.embed_model.get_query_embedding(query)) + relevant_docs = dataset.relevant_docs[query_id] + relevant_docs = torch.tensor( + [1 if doc in relevant_docs else 0 for doc in dataset.corpus] + ) + + examples.append((query_embedding, relevant_docs)) + + return DataLoader(examples, batch_size=batch_size) + + def _get_corpus_embeddings(self, dataset: EmbeddingQAFinetuneDataset): + """Get corpus
embeddings.""" + try: + import torch + except ImportError: + raise ImportError(IMPORT_ERROR_MSG) + + text_embeddings = [ + self.embed_model.get_text_embedding(text) + for text in dataset.corpus.values() + ] + return torch.tensor(text_embeddings, requires_grad=False) + + def _evaluate_acc(self, model, loader): + """Evaluate model.""" + try: + import torch + except ImportError: + raise ImportError(IMPORT_ERROR_MSG) + + model.eval() + total_acc = 0 + total_records = 0 + with torch.no_grad(): + for query_embeddings_t, relevant_docs_t in loader: + query_embeddings_t = query_embeddings_t.to(self._target_device) + relevant_docs_t = relevant_docs_t.to(self._target_device) + + preds = model(query_embeddings_t) + out = preds.max(1).indices.view(-1, 1) + truths = torch.gather(relevant_docs_t, 1, out) + + total_acc += truths.sum().item() + total_records += truths.shape[0] + return total_acc / total_records + + def finetune(self): + try: + import torch + from torch import nn + from torch.nn import functional as F + except ImportError: + raise ImportError(IMPORT_ERROR_MSG) + + # initialize the weights of a linear model with the normalized corpus embeddings + w_init = F.normalize(self.corpus_embeddings) + model = nn.Linear(w_init.shape[1], w_init.shape[0], bias=False) + model.weight.data = w_init + model.to(self._target_device) + + # train the model + optimizer = torch.optim.Adam( + model.parameters(), lr=1e-5, betas=(0.9, 0.999), eps=1e-8 + ) + best_val_acc = self._evaluate_acc(model, self.val_loader) + + for epoch in trange(self._epochs, desc="Epoch"): + model.train() + for query_embeddings_t, relevant_docs_t in tqdm( + self.train_loader, desc=f"Epoch {epoch+1}/{self._epochs}", leave=False + ): + query_embeddings_t = query_embeddings_t.to(self._target_device) + relevant_docs_t = relevant_docs_t.to(self._target_device) + + loss = multiclass_nll_loss(model(query_embeddings_t), relevant_docs_t) + + loss.backward() + optimizer.step() + optimizer.zero_grad() + + # normalize the weights + with torch.no_grad(): + model.weight.data = F.normalize(model.weight.data) + + if self._verbose: + print_text( + f"> [Epoch {epoch}] Current loss: {loss}\n", color="blue" + ) + if self.val_loader is not None: + val_acc = self._evaluate_acc(model, self.val_loader) + if self._verbose: + print_text( + f"> [Epoch {epoch}] validation acc: {val_acc} best validation acc: {best_val_acc} \n", + color="blue", + ) + if val_acc > best_val_acc: + best_val_acc = val_acc + self.corpus_embeddings = model.weight.data.clone() + else: + self.corpus_embeddings = model.weight.data.clone() + + def get_finetuned_corpus_embeddings(self): + return self.corpus_embeddings diff --git a/llama-index-experimental/pyproject.toml b/llama-index-experimental/pyproject.toml index b1fdf86cbe1aa..bf37f5a99c16d 100644 --- a/llama-index-experimental/pyproject.toml +++ b/llama-index-experimental/pyproject.toml @@ -25,11 +25,12 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-experimental" readme = "README.md" -version = "0.3.0" +version = "0.3.1" [tool.poetry.dependencies] -python = ">=3.8.1,<4.0" +python = ">=3.9,<4.0" llama-index-core = "^0.11.0" +llama-index-finetuning = "^0.2.0" pandas = "*" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/utils.py b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/utils.py index abba732307ed1..8d197ec729058 100644 --- 
a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/utils.py +++ b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/utils.py @@ -8,7 +8,7 @@ def resolve_tool_choice(tool_choice: Union[str, dict] = "auto") -> Union[str, di If tool_choice is a function name string, return the appropriate dict. """ - if isinstance(tool_choice, str) and tool_choice not in ["none", "auto"]: + if isinstance(tool_choice, str) and tool_choice not in ["none", "auto", "required"]: return {"type": "function", "function": {"name": tool_choice}} return tool_choice diff --git a/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml b/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml index f2fd35ec54515..0efa3d9ef9142 100644 --- a/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml +++ b/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml @@ -28,7 +28,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-agent-openai" readme = "README.md" -version = "0.3.0" +version = "0.3.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-arize-phoenix/pyproject.toml b/llama-index-integrations/callbacks/llama-index-callbacks-arize-phoenix/pyproject.toml index b7f5754671069..f8bbec1c283f3 100644 --- a/llama-index-integrations/callbacks/llama-index-callbacks-arize-phoenix/pyproject.toml +++ b/llama-index-integrations/callbacks/llama-index-callbacks-arize-phoenix/pyproject.toml @@ -28,12 +28,12 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-callbacks-arize-phoenix" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<3.13" arize-phoenix = ">=3.0.3" -openinference-instrumentation-llama-index = ">=1.0.0" +openinference-instrumentation-llama-index = ">=3.0.0" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/.gitignore b/llama-index-integrations/callbacks/llama-index-callbacks-opik/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/BUILD b/llama-index-integrations/callbacks/llama-index-callbacks-opik/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/Makefile b/llama-index-integrations/callbacks/llama-index-callbacks-opik/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. 
+ sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/README.md b/llama-index-integrations/callbacks/llama-index-callbacks-opik/README.md new file mode 100644 index 0000000000000..77c10d82093ea --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/README.md @@ -0,0 +1,31 @@ +# LlamaIndex Callbacks Integration: Opik + +```shell +pip install llama-index-callbacks-opik +``` + +This integration allows you to get one-click observability of your LlamaIndex RAG pipelines on [Opik](https://comet.com/site/products/opik/?utm_medium=docs&utm_source=llamaindex&utm_campaign=opik). + +The simplest way to get started and try out Opik is to sign up on our [cloud instance](https://comet.com/signup?from=llm?utm_medium=docs&utm_source=llamaindex&utm_campaign=opik). +You can then get your API key from the quickstart page or the user menu and start logging! + +You can initialize it globally using + +```python +from llama_index.core import set_global_handler + +# You should provide your OPIK API key and Workspace using the following environment variables: +# OPIK_API_KEY, OPIK_WORKSPACE +set_global_handler("opik") +``` + +or: + +```python +from llama_index.core import Settings +from llama_index.core.callbacks import CallbackManager +from opik.integrations.llama_index import LlamaIndexCallbackHandler + +opik_callback_handler = LlamaIndexCallbackHandler() +Settings.callback_manager = CallbackManager([opik_callback_handler]) +``` diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/examples/BUILD b/llama-index-integrations/callbacks/llama-index-callbacks-opik/examples/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/examples/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/examples/opik_example.py b/llama-index-integrations/callbacks/llama-index-callbacks-opik/examples/opik_example.py new file mode 100644 index 0000000000000..980b6d14420b5 --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/examples/opik_example.py @@ -0,0 +1,22 @@ +from llama_index.core import Document, VectorStoreIndex, set_global_handler + +# You should provide your OPIK API key and Workspace using the following environment variables: +# OPIK_API_KEY, OPIK_WORKSPACE +set_global_handler( + "opik", +) + +# This example uses OpenAI by default so don't forget to set an OPENAI_API_KEY +index = VectorStoreIndex.from_documents([Document.example()]) +query_engine = index.as_query_engine() + +questions = [ + "Tell me about LLMs", + "How do you fine-tune a neural network?", + "What is RAG?", +] + +for question in questions: + print(f"> \033[92m{question}\033[0m") + response = query_engine.query(question) + print(response) diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/BUILD b/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/__init__.py
b/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/__init__.py new file mode 100644 index 0000000000000..d84a466d00091 --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/__init__.py @@ -0,0 +1,3 @@ +from llama_index.callbacks.opik.base import opik_callback_handler + +__all__ = ["opik_callback_handler"] diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/base.py b/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/base.py new file mode 100644 index 0000000000000..996dcfcb9ee3a --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/llama_index/callbacks/opik/base.py @@ -0,0 +1,15 @@ +from typing import Any + +from llama_index.core.callbacks.base_handler import BaseCallbackHandler + + +def opik_callback_handler(**eval_params: Any) -> BaseCallbackHandler: + try: + from opik.integrations.llama_index import LlamaIndexCallbackHandler + + return LlamaIndexCallbackHandler(**eval_params) + + except ImportError: + raise ImportError( + "Please install the Opik Python SDK with `pip install -U opik`" + ) diff --git a/llama-index-integrations/callbacks/llama-index-callbacks-opik/pyproject.toml b/llama-index-integrations/callbacks/llama-index-callbacks-opik/pyproject.toml new file mode 100644 index 0000000000000..0fffe1616c3ff --- /dev/null +++ b/llama-index-integrations/callbacks/llama-index-callbacks-opik/pyproject.toml @@ -0,0 +1,53 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = true +import_path = "llama_index.callbacks.opik" + +[tool.llamahub.class_authors] +opik_callback_handler = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["Jacques VERRE "] +description = "llama-index callbacks Opik Comet integration" +license = "MIT" +name = "llama-index-callbacks-opik" +packages = [{include = "llama_index/"}] +readme = "README.md" +version = "1.0.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +llama-index-core = "^0.11.0" + +[tool.poetry.group.dev.dependencies] +black = {extras = ["jupyter"], version = "<=23.9.1,>=23.7.0"} +codespell = {extras = ["toml"], version = ">=v2.2.6"} +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" # TODO: unpin when mypy>0.991 +types-setuptools = "67.1.0.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/.gitignore b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ 
+develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/BUILD b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/Makefile b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. 
+ sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/README.md b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/README.md new file mode 100644 index 0000000000000..61cbe2a2ba9ee --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/README.md @@ -0,0 +1,27 @@ +# LlamaIndex Embeddings Integration: Alibabacloud_Aisearch + +## Installation + +``` +pip install llama-index-embeddings-alibabacloud-aisearch +``` + +## Usage + +For further details, please visit [text-embedding-api-details](https://help.aliyun.com/zh/open-search/search-platform/developer-reference/text-embedding-api-details). + +You can specify the `endpoint` and `aisearch_api_key` in the constructor, or set the environment variables `AISEARCH_ENDPOINT` and `AISEARCH_API_KEY`. + +```python +from llama_index.embeddings.alibabacloud_aisearch import ( + AlibabaCloudAISearchEmbedding, +) + +embed_model = AlibabaCloudAISearchEmbedding() +embedding = embed_model.get_query_embedding("llama-index") +print(len(embedding)) + +# embeddings = embed_model.get_text_embedding_batch( +# ["科学技术是第一生产力", "opensearch产品文档"], show_progress=True +# ) +``` diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/BUILD b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/__init__.py b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/__init__.py new file mode 100644 index 0000000000000..cd664d97df374 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/__init__.py @@ -0,0 +1,6 @@ +from llama_index.embeddings.alibabacloud_aisearch.base import ( + AlibabaCloudAISearchEmbedding, +) + + +__all__ = ["AlibabaCloudAISearchEmbedding"] diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/base.py new file mode 100644 index 0000000000000..d7dbfab18cf97 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/llama_index/embeddings/alibabacloud_aisearch/base.py @@ -0,0 +1,179 @@ +import asyncio +import time +from typing import Any, List + +from llama_index.core.base.embeddings.base import BaseEmbedding +from llama_index.core.bridge.pydantic import Field, PrivateAttr + +from llama_index.core.base.llms.generic_utils import get_from_param_or_env + +try: + from alibabacloud_searchplat20240529.models import ( + GetTextEmbeddingRequest, + GetTextEmbeddingResponse, + ) + from alibabacloud_tea_openapi.models import Config as AISearchConfig +
from alibabacloud_searchplat20240529.client import Client + from Tea.exceptions import TeaException +except ImportError: + raise ImportError( + "Could not import alibabacloud_searchplat20240529 python package. " + "Please install it with `pip install alibabacloud-searchplat20240529`." + ) + + +def retry_decorator(func, wait_seconds: int = 1): + def wrap(*args, **kwargs): + while True: + try: + return func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + time.sleep(wait_seconds) + else: + raise + + return wrap + + +def aretry_decorator(func, wait_seconds: int = 1): + async def wrap(*args, **kwargs): + while True: + try: + return await func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + await asyncio.sleep(wait_seconds) + else: + raise + + return wrap + + +class AlibabaCloudAISearchEmbedding(BaseEmbedding): + """ + For further details, please visit `https://help.aliyun.com/zh/open-search/search-platform/developer-reference/text-embedding-api-details`. + """ + + _client: Client = PrivateAttr() + + aisearch_api_key: str = Field(default=None, exclude=True) + endpoint: str = None + + service_id: str = "ops-text-embedding-002" + workspace_name: str = "default" + + def __init__( + self, endpoint: str = None, aisearch_api_key: str = None, **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.aisearch_api_key = get_from_param_or_env( + "aisearch_api_key", aisearch_api_key, "AISEARCH_API_KEY" + ) + self.endpoint = get_from_param_or_env("endpoint", endpoint, "AISEARCH_ENDPOINT") + + config = AISearchConfig( + bearer_token=self.aisearch_api_key, + endpoint=self.endpoint, + protocol="http", + ) + + self._client = Client(config=config) + + @classmethod + def class_name(cls) -> str: + return "AlibabaCloudAISearchEmbedding" + + @retry_decorator + def _get_embedding(self, text: str, input_type: str) -> List[float]: + request = GetTextEmbeddingRequest(input=text, input_type=input_type) + response: GetTextEmbeddingResponse = self._client.get_text_embedding( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + ) + embeddings = response.body.result.embeddings + return embeddings[0].embedding + + @aretry_decorator + async def _aget_embedding(self, text: str, input_type: str) -> List[float]: + request = GetTextEmbeddingRequest(input=text, input_type=input_type) + response: GetTextEmbeddingResponse = ( + await self._client.get_text_embedding_async( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + ) + ) + embeddings = response.body.result.embeddings + return embeddings[0].embedding + + @retry_decorator + def _get_embeddings(self, texts: List[str], input_type: str) -> List[List[float]]: + request = GetTextEmbeddingRequest(input=texts, input_type=input_type) + response: GetTextEmbeddingResponse = self._client.get_text_embedding( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + ) + embeddings = response.body.result.embeddings + return [emb.embedding for emb in embeddings] + + @aretry_decorator + async def _aget_embeddings( + self, + texts: List[str], + input_type: str, + ) -> List[List[float]]: + request = GetTextEmbeddingRequest(input=texts, input_type=input_type) + response: GetTextEmbeddingResponse = ( + await self._client.get_text_embedding_async( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + ) + ) + embeddings = response.body.result.embeddings + return [emb.embedding for 
emb in embeddings] + + def _get_query_embedding(self, query: str) -> List[float]: + """Get query embedding.""" + return self._get_embedding( + query, + input_type="query", + ) + + async def _aget_query_embedding(self, query: str) -> List[float]: + """The asynchronous version of _get_query_embedding.""" + return await self._aget_embedding( + query, + input_type="query", + ) + + def _get_text_embedding(self, text: str) -> List[float]: + """Get text embedding.""" + return self._get_embedding( + text, + input_type="document", + ) + + async def _aget_text_embedding(self, text: str) -> List[float]: + """The asynchronous version of _get_text_embedding.""" + return await self._aget_embedding( + text, + input_type="document", + ) + + def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]: + """Get text embeddings.""" + return self._get_embeddings( + texts, + input_type="document", + ) + + async def _aget_text_embeddings(self, texts: List[str]) -> List[List[float]]: + """The asynchronous version of _get_text_embeddings.""" + return await self._aget_embeddings( + texts, + input_type="document", + ) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/pyproject.toml new file mode 100644 index 0000000000000..ba85da1a24530 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.embeddings.alibabacloud_aisearch" + +[tool.llamahub.class_authors] +AlibabaCloudAISearchEmbedding = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["llama-index"] +description = "llama-index embeddings alibabacloud_aisearch integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-embeddings-alibabacloud-aisearch" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +alibabacloud-searchplat20240529 = "^1.1.0" +llama-index-core = "^0.11.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/BUILD b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git 
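# --- A minimal sketch (editor's addition, not part of the patch): the async path
# of the embedding class above. Assumes AISEARCH_ENDPOINT and AISEARCH_API_KEY are
# set in the environment; `aget_query_embedding` is the public async entry point
# inherited from BaseEmbedding that routes to `_aget_query_embedding`.
import asyncio

from llama_index.embeddings.alibabacloud_aisearch import AlibabaCloudAISearchEmbedding


async def main() -> None:
    embed_model = AlibabaCloudAISearchEmbedding()
    embedding = await embed_model.aget_query_embedding("llama-index")
    print(len(embedding))


asyncio.run(main())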
a/llama-index-integrations/embeddings/llama-index-embeddings-openai/llama_index/embeddings/__init__.py b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/__init__.py similarity index 100% rename from llama-index-integrations/embeddings/llama-index-embeddings-openai/llama_index/embeddings/__init__.py rename to llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/__init__.py diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/test_embeddings_alibabacloud_aisearch.py b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/test_embeddings_alibabacloud_aisearch.py new file mode 100644 index 0000000000000..358b3cadb1ad9 --- /dev/null +++ b/llama-index-integrations/embeddings/llama-index-embeddings-alibabacloud-aisearch/tests/test_embeddings_alibabacloud_aisearch.py @@ -0,0 +1,7 @@ +from llama_index.core.base.embeddings.base import BaseEmbedding +from llama_index.embeddings.alibabacloud_aisearch import AlibabaCloudAISearchEmbedding + + +def test_class(): + names_of_base_classes = [b.__name__ for b in AlibabaCloudAISearchEmbedding.__mro__] + assert BaseEmbedding.__name__ in names_of_base_classes diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/llama_index/embeddings/azure_inference/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/llama_index/embeddings/azure_inference/base.py index a2c381780a7fd..cb50ce044c04d 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/llama_index/embeddings/azure_inference/base.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/llama_index/embeddings/azure_inference/base.py @@ -1,5 +1,6 @@ """Azure AI model inference embeddings client.""" +import logging from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING from llama_index.core.base.embeddings.base import ( @@ -16,6 +17,9 @@ from azure.ai.inference import EmbeddingsClient from azure.ai.inference.aio import EmbeddingsClient as EmbeddingsClientAsync from azure.core.credentials import AzureKeyCredential +from azure.core.exceptions import HttpResponseError + +logger = logging.getLogger(__name__) class AzureAIEmbeddingsModel(BaseEmbedding): @@ -75,7 +79,7 @@ def __init__( callback_manager: Optional[CallbackManager] = None, num_workers: Optional[int] = None, client_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ): client_kwargs = client_kwargs or {} @@ -104,27 +108,43 @@ def __init__( "Pass the credential as a parameter or set the AZURE_INFERENCE_CREDENTIAL" ) - super().__init__( - model_name=model_name or "unknown", - embed_batch_size=embed_batch_size, - callback_manager=callback_manager, - num_workers=num_workers, - **kwargs, - ) - self._client = EmbeddingsClient( + client = EmbeddingsClient( endpoint=endpoint, credential=credential, user_agent="llamaindex", **client_kwargs, ) - self._async_client = EmbeddingsClientAsync( + async_client = EmbeddingsClientAsync( endpoint=endpoint, credential=credential, user_agent="llamaindex", **client_kwargs, ) + if not model_name: + try: + # Get model info from the endpoint. This method may not be supported by all + # endpoints. + model_info = client.get_model_info() + model_name = model_info.get("model_name", None) + except HttpResponseError: + logger.warning( + f"Endpoint '{endpoint}' does not support model metadata retrieval.
" + "Unable to populate model attributes." + ) + + super().__init__( + model_name=model_name or "unknown", + embed_batch_size=embed_batch_size, + callback_manager=callback_manager, + num_workers=num_workers, + **kwargs, + ) + + self._client = client + self._async_client = async_client + @classmethod def class_name(cls) -> str: return "AzureAIEmbeddingsModel" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/pyproject.toml index 03f5f1236a0ba..2f7b99b472325 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/pyproject.toml @@ -28,7 +28,7 @@ license = "MIT" name = "llama-index-embeddings-azure-inference" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.1" +version = "0.2.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/tests/test_embeddings_azure_inference.py b/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/tests/test_embeddings_azure_inference.py index 458472cc51c08..6118d9f83355b 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/tests/test_embeddings_azure_inference.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-azure-inference/tests/test_embeddings_azure_inference.py @@ -28,3 +28,27 @@ def test_embed(): assert len(response) == len(nodes) assert response[0].embedding + + +@pytest.mark.skipif( + not { + "AZURE_INFERENCE_ENDPOINT", + "AZURE_INFERENCE_CREDENTIAL", + }.issubset(set(os.environ)), + reason="Azure AI endpoint and/or credential are not set.", +) +def test_get_metadata(caplog): + """Tests if we can get model metadata back from the endpoint. If so, + model_name should not be 'unknown'. Some endpoints may not support this + and in those cases a warning should be logged. 
+ """ + # In case the endpoint being tested serves more than one model + model_name = os.environ.get("AZURE_INFERENCE_MODEL", None) + + embed_model = AzureAIEmbeddingsModel(model_name=model_name) + + assert ( + embed_model.model_name != "unknown" + or "does not support model metadata retrieval" in caplog.text + ) + assert not model_name or embed_model.model_name == model_name diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/llama_index/embeddings/azure_openai/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/llama_index/embeddings/azure_openai/base.py index bae81611c6d39..f654e8dfa2845 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/llama_index/embeddings/azure_openai/base.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/llama_index/embeddings/azure_openai/base.py @@ -1,7 +1,12 @@ -from typing import Any, Dict, Optional +from typing import Annotated, Any, Dict, Optional import httpx -from llama_index.core.bridge.pydantic import Field, PrivateAttr, model_validator +from llama_index.core.bridge.pydantic import ( + Field, + PrivateAttr, + WithJsonSchema, + model_validator, +) from llama_index.core.callbacks.base import CallbackManager from llama_index.core.constants import DEFAULT_EMBED_BATCH_SIZE from llama_index.core.base.llms.generic_utils import get_from_param_or_env @@ -17,6 +22,13 @@ from openai import AsyncAzureOpenAI, AzureOpenAI from openai.lib.azure import AzureADTokenProvider +# Used to serialized provider in schema +AnnotatedProvider = Annotated[ + AzureADTokenProvider, + WithJsonSchema({"type": "string"}, mode="serialization"), + WithJsonSchema({"type": "string"}, mode="validation"), +] + class AzureOpenAIEmbedding(OpenAIEmbedding): azure_endpoint: Optional[str] = Field( @@ -37,8 +49,10 @@ class AzureOpenAIEmbedding(OpenAIEmbedding): validate_default=True, ) - azure_ad_token_provider: Optional[AzureADTokenProvider] = Field( - default=None, description="Callback function to provide Azure AD token." 
+ azure_ad_token_provider: Optional[AnnotatedProvider] = Field( + default=None, + description="Callback function to provide Azure AD token.", + exclude=True, ) use_azure_ad: bool = Field( description="Indicates if Microsoft Entra ID (former Azure AD) is used for token authentication" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/pyproject.toml index b7f813b5402f6..52ca28cdb58fb 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-azure-openai/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-embeddings-azure-openai" readme = "README.md" -version = "0.2.4" +version = "0.2.5" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-litellm/llama_index/embeddings/litellm/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-litellm/llama_index/embeddings/litellm/base.py index a373a43071121..3b468dc0362b5 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-litellm/llama_index/embeddings/litellm/base.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-litellm/llama_index/embeddings/litellm/base.py @@ -1,35 +1,51 @@ -from typing import List +from typing import Any, List, Optional from litellm import embedding from llama_index.core.bridge.pydantic import Field from llama_index.core.embeddings import BaseEmbedding -def get_embeddings(api_key: str, api_base: str, model_name: str, input: List[str]): - if not api_key: - # If key is not provided, we assume the consumer has configured - # their LiteLLM proxy server with their API key. - api_key = "some key" +def get_embeddings( + api_key: str, api_base: str, model_name: str, input: List[str], **kwargs: Any +) -> List[List[float]]: + """ + Retrieve embeddings for a given list of input strings using the specified model. + Args: + api_key (str): The API key for authentication. + api_base (str): The base URL of the LiteLLM proxy server. + model_name (str): The name of the model to use for generating embeddings. + input (List[str]): A list of input strings for which embeddings are to be generated. + **kwargs (Any): Additional keyword arguments to be passed to the embedding function. + + Returns: + List[List[float]]: A list of embeddings, where each embedding corresponds to an input string. + """ response = embedding( api_key=api_key, api_base=api_base, model=model_name, input=input, + **kwargs, ) return [result["embedding"] for result in response.data] class LiteLLMEmbedding(BaseEmbedding): - model_name: str = Field( - default="unknown", description="The name of the embedding model." - ) - api_key: str = Field( - default="unknown", + model_name: str = Field(description="The name of the embedding model.") + api_key: Optional[str] = Field( + default=None, description="OpenAI key. If not provided, the proxy server must be configured with the key.", ) - api_base: str = Field( - default="unknown", description="The base URL of the LiteLLM proxy." + api_base: Optional[str] = Field( + default=None, description="The base URL of the LiteLLM proxy." + ) + dimensions: Optional[int] = Field( + default=None, + description=( + "The number of dimensions the resulting output embeddings should have. " + "Only supported in text-embedding-3 and later models." 
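# --- A minimal sketch (editor's addition, not part of the patch): passing the
# new `dimensions` field through LiteLLMEmbedding. The model name and API key are
# assumptions; per the field description, dimensions is only honored by
# text-embedding-3 and later models.
from llama_index.embeddings.litellm import LiteLLMEmbedding

embed_model = LiteLLMEmbedding(
    model_name="text-embedding-3-small",
    api_key="YOUR_API_KEY",  # or omit and rely on a configured LiteLLM proxy
    dimensions=256,
)
vector = embed_model.get_text_embedding("hello world")
assert len(vector) == 256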
+ ), ) @classmethod @@ -47,6 +63,7 @@ def _get_query_embedding(self, query: str) -> List[float]: api_key=self.api_key, api_base=self.api_base, model_name=self.model_name, + dimensions=self.dimensions, input=[query], ) return embeddings[0] @@ -56,6 +73,7 @@ def _get_text_embedding(self, text: str) -> List[float]: api_key=self.api_key, api_base=self.api_base, model_name=self.model_name, + dimensions=self.dimensions, input=[text], ) return embeddings[0] @@ -65,5 +83,6 @@ def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]: api_key=self.api_key, api_base=self.api_base, model_name=self.model_name, + dimensions=self.dimensions, input=texts, ) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-litellm/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-litellm/pyproject.toml index 07213990820a3..01099fd174f33 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-litellm/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-litellm/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-embeddings-litellm" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/llama_index/embeddings/nvidia/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/llama_index/embeddings/nvidia/base.py index 57749b513f07a..6fe80b70b89b7 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/llama_index/embeddings/nvidia/base.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/llama_index/embeddings/nvidia/base.py @@ -12,7 +12,7 @@ from llama_index.core.base.llms.generic_utils import get_from_param_or_env from openai import OpenAI, AsyncOpenAI -from urllib.parse import urlunparse, urlparse +from urllib.parse import urlparse # integrate.api.nvidia.com is the default url for most models, any # bespoke endpoints will need to be added to the MODEL_ENDPOINT_MAP @@ -36,15 +36,14 @@ class Model(BaseModel): id: str + base_model: Optional[str] = None class NVIDIAEmbedding(BaseEmbedding): """NVIDIA embeddings.""" - model: str = Field( - default=DEFAULT_MODEL, + model: Optional[str] = Field( description="Name of the NVIDIA embedding model to use.\n" - "Defaults to 'NV-Embed-QA'.", ) truncate: Literal["NONE", "START", "END"] = Field( @@ -71,7 +70,7 @@ class NVIDIAEmbedding(BaseEmbedding): def __init__( self, - model: str = DEFAULT_MODEL, + model: Optional[str] = None, timeout: Optional[float] = 120, max_retries: Optional[int] = 5, nvidia_api_key: Optional[str] = None, @@ -119,10 +118,9 @@ def __init__( "NO_API_KEY_PROVIDED", ) - if ( - not base_url or base_url in KNOWN_URLS - ): # hosted on API Catalog (build.nvidia.com) - self._is_hosted = True + base_url = base_url or BASE_URL + self._is_hosted = base_url in KNOWN_URLS + if self._is_hosted: # hosted on API Catalog (build.nvidia.com) if api_key == "NO_API_KEY_PROVIDED": raise ValueError("An API key is required for hosted NIM.") # TODO: we should not assume unknown models are at the base url @@ -146,6 +144,30 @@ def __init__( ) self._aclient._custom_headers = {"User-Agent": "llama-index-embeddings-nvidia"} + if not model: + self.__get_default_model() + + def __get_default_model(self) -> None: + """Set default model.""" + if not self._is_hosted: + valid_models = [ + model.id + for model in self.available_models + if not model.base_model or 
model.base_model == model.id ] + self.model = next(iter(valid_models), None) + if self.model: + warnings.warn( + f"Default model is set as: {self.model}. \n" + "Set the model using the `model` parameter. \n" + "To list available models, use the `available_models` property.", + UserWarning, + ) + else: + raise ValueError("No locally hosted model was found.") + else: + self.model = self.model or DEFAULT_MODEL + def _validate_url(self, base_url): """ Base URL Validation. @@ -156,27 +178,25 @@ def _validate_url(self, base_url): expected_format = "Expected format is 'http://host:port'." result = urlparse(base_url) if not (result.scheme and result.netloc): - raise ValueError( - f"Invalid base_url, Expected format is 'http://host:port': {base_url}" - ) - if result.path: - normalized_path = result.path.strip("/") - if normalized_path == "v1": - pass - elif normalized_path == "v1/embeddings": - warnings.warn(f"{expected_format} Rest is Ignored.") - else: - raise ValueError(f"Base URL path is not recognized. {expected_format}") - return urlunparse((result.scheme, result.netloc, "v1", "", "", "")) + raise ValueError(f"Invalid base_url, {expected_format}") + if base_url.endswith("embeddings"): + warnings.warn(f"{expected_format} The rest of the path is ignored.") + return base_url.strip("/") @property def available_models(self) -> List[Model]: """Get available models.""" # TODO: hosted now has a model listing, need to merge known and listed models if not self._is_hosted: - ids = [model.id for model in self._client.models.list()] - return [Model(id=id) for id in ids] + return [ + Model( + id=model.id, + base_model=getattr(model, "params", {}).get("root", None), + ) + for model in self._client.models.list() + ] + else: + return [Model(id=id) for id in MODEL_ENDPOINT_MAP]
test_create_unknown_url_without_api_key(masked_env_var: str) -> None: Interface(base_url="https://test_url/v1") @@ -39,6 +64,13 @@ def test_api_key_priority(masked_env_var: str) -> None: del os.environ["NVIDIA_API_KEY"] +@pytest.mark.integration() +def test_missing_api_key_error(masked_env_var: str) -> None: + with pytest.raises(ValueError) as err_msg: + Interface() + assert "An API key is required" in str(err_msg.value) + + @pytest.mark.integration() def test_bogus_api_key_error(masked_env_var: str) -> None: client = Interface(nvidia_api_key="BOGUS") diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_available_models.py b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_available_models.py index bdd95fe04185e..11fc2cefcdd20 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_available_models.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_available_models.py @@ -4,8 +4,8 @@ @pytest.mark.integration() -def test_available_models(mode: dict) -> None: - models = Interface(**mode).available_models +def test_available_models() -> None: + models = Interface().available_models assert models assert isinstance(models, list) assert all(isinstance(model.id, str) for model in models) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_base_url.py b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_base_url.py index 986054a495108..17a77bb8b09d7 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_base_url.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_base_url.py @@ -1,68 +1,58 @@ -from urllib.parse import urlparse, urlunparse - import pytest -from requests_mock import Mocker from llama_index.embeddings.nvidia import NVIDIAEmbedding as Interface +from pytest_httpx import HTTPXMock @pytest.fixture() -def mock_v1_local_models2(requests_mock: Mocker, base_url: str) -> None: - result = urlparse(base_url) - base_url = urlunparse((result.scheme, result.netloc, "v1", "", "", "")) - requests_mock.get( - f"{base_url}/models", - json={ - "data": [ - { - "id": "model1", - "object": "model", - "created": 1234567890, - "owned_by": "OWNER", - "root": "model1", - }, - ] - }, +def mock_local_models(httpx_mock: HTTPXMock, base_url: str): + mock_response = { + "data": [ + { + "id": "model1", + "object": "model", + "created": 1234567890, + "owned_by": "OWNER", + "root": "model1", + } + ] + } + + httpx_mock.add_response( + url=f"{base_url}/models", + method="GET", + json=mock_response, + status_code=200, ) -# test case for invalid base_url +# test case for base_url warning @pytest.mark.parametrize( "base_url", [ - "localhost", - "localhost:8888", "http://localhost:8888/embeddings", - "http://0.0.0.0:8888/rankings", - "http://localhost:8888/chat/completions", - "http://test_url/.../v1", - "https://test_url/.../v1", ], ) -def test_base_url_invalid_not_hosted( - base_url: str, mock_v1_local_models2: None -) -> None: - with pytest.raises(ValueError): - Interface(base_url=base_url) +def test_base_url_invalid_not_hosted(base_url: str, mock_local_models) -> None: + with pytest.warns(UserWarning) as msg: + cls = Interface(base_url=base_url) + assert cls._is_hosted is False + assert len(msg) == 2 + assert "Expected format is " in str(msg[0].message) @pytest.mark.parametrize( "base_url", [ - "http://localhost:8080/v1/embeddings", + 
"http://localhost:8080/v1", ], ) -def test_base_url_valid_not_hosted(base_url: str, mock_v1_local_models2: None) -> None: +def test_base_url_valid_not_hosted(base_url: str, mock_local_models: None) -> None: with pytest.warns(UserWarning): - Interface(base_url=base_url) + cls = Interface(base_url=base_url) + assert cls._is_hosted is False + assert cls.model == "model1" -@pytest.mark.parametrize( - "base_url", - [ - "https://ai.api.nvidia.com/v1/retrieval/nvidia/", - "https://ai.api.nvidia.com/v1/retrieval/snowflake/arctic-embed-l", - "https://integrate.api.nvidia.com/v1/", - ], -) -def test_base_url_valid_hosted(base_url: str, mock_v1_local_models2: None) -> None: - Interface(api_key="BOGUS", base_url=base_url) +# @pytest.mark.parametrize("base_url", ["https://integrate.api.nvidia.com/v1/"]) +# def test_base_url_valid_hosted(base_url: str) -> None: +# Interface(base_url=base_url) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_embeddings_nvidia.py b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_embeddings_nvidia.py index 81bb863940991..38b0922e43edd 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_embeddings_nvidia.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-nvidia/tests/test_embeddings_nvidia.py @@ -7,6 +7,22 @@ from openai import AuthenticationError +from pytest_httpx import HTTPXMock + + +@pytest.fixture() +def mock_integration_api(httpx_mock: HTTPXMock): + BASE_URL = "https://integrate.api.nvidia.com/v1" + mock_response = {"object": "list", "data": [{"index": 0, "embedding": ""}]} + + httpx_mock.add_response( + method="POST", + url=f"{BASE_URL}/embeddings", + json=mock_response, + headers={"Content-Type": "application/json"}, + status_code=200, + ) + def test_embedding_class(): emb = NVIDIAEmbedding(api_key="BOGUS") @@ -56,7 +72,7 @@ def test_nvidia_embedding_async(): text_embs.close() -def test_nvidia_embedding_callback(): +def test_nvidia_embedding_callback(mock_integration_api): llama_debug = LlamaDebugHandler(print_trace_on_end=False) assert len(llama_debug.get_events()) == 0 @@ -71,7 +87,6 @@ def test_nvidia_embedding_callback(): assert len(llama_debug.get_events(CBEventType.EMBEDDING)) > 0 -def test_nvidia_embedding_throws_with_invalid_key(): +def test_nvidia_embedding_throws_with_invalid_key(mock_integration_api): emb = NVIDIAEmbedding(api_key="invalid") - with pytest.raises(AuthenticationError): - emb.get_text_embedding("hi") + emb.get_text_embedding("hi") diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-openai/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-openai/pyproject.toml index 1cf9f3d5f5a76..aeff16ebb0670 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-openai/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-openai/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-embeddings-openai" readme = "README.md" -version = "0.2.3" +version = "0.2.4" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/llama_index/embeddings/sagemaker_endpoint/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/llama_index/embeddings/sagemaker_endpoint/base.py index bfc657624af33..0f58667906dba 100644 --- 
a/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/llama_index/embeddings/sagemaker_endpoint/base.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/llama_index/embeddings/sagemaker_endpoint/base.py @@ -86,6 +86,11 @@ def __init__( model_kwargs=model_kwargs, content_handler=content_handler, embed_batch_size=embed_batch_size, + profile_name=profile_name, + region_name=region_name, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, pydantic_program_mode=pydantic_program_mode, callback_manager=callback_manager, ) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/pyproject.toml index 72154476d16e0..bc0f43a7f55f6 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-sagemaker-endpoint/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-embeddings-sagemaker-endpoint" readme = "README.md" -version = "0.2.1" +version = "0.2.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/llama_index/embeddings/upstage/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/llama_index/embeddings/upstage/base.py index cc6a996f6199c..ef8787279f1fd 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/llama_index/embeddings/upstage/base.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/llama_index/embeddings/upstage/base.py @@ -51,7 +51,7 @@ class UpstageEmbedding(OpenAIEmbedding): default_factory=dict, description="Additional kwargs for the Upstage API." ) - api_key: str = Field(alias="upstage_api_key", description="The Upstage API key.") + api_key: str = Field(description="The Upstage API key.") api_base: Optional[str] = Field( default=DEFAULT_UPSTAGE_API_BASE, description="The base URL for Upstage API." 
) @@ -127,14 +127,14 @@ def __init__( def class_name(cls) -> str: return "UpstageEmbedding" - def _get_credential_kwargs(self) -> Dict[str, Any]: + def _get_credential_kwargs(self, is_async: bool = False) -> Dict[str, Any]: return { "api_key": self.api_key, "base_url": self.api_base, "max_retries": self.max_retries, "timeout": self.timeout, "default_headers": self.default_headers, - "http_client": self._http_client, + "http_client": self._async_http_client if is_async else self._http_client, } def _get_query_embedding(self, query: str) -> List[float]: diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/pyproject.toml index 4aa9f3c03439d..a317c34f183d0 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/pyproject.toml @@ -30,7 +30,7 @@ license = "MIT" name = "llama-index-embeddings-upstage" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/integration_tests/test_integrations.py b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/integration_tests/test_integrations.py index ccf7e6cc3825d..4ad83129230bb 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/integration_tests/test_integrations.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/integration_tests/test_integrations.py @@ -1,6 +1,15 @@ import os import pytest +from pytest_mock import MockerFixture + +MOCK_EMBEDDING_DATA = [1.0, 2.0, 3.0] +UPSTAGE_TEST_API_KEY = "UPSTAGE_TEST_API_KEY" + + +@pytest.fixture() +def setup_environment(monkeypatch): + monkeypatch.setenv("UPSTAGE_API_KEY", UPSTAGE_TEST_API_KEY) @pytest.fixture() @@ -14,50 +23,101 @@ def upstage_embedding(): return UpstageEmbedding() -def test_upstage_embedding_query_embedding(upstage_embedding): +def test_upstage_embedding_query_embedding( + mocker: MockerFixture, setup_environment, upstage_embedding +): query = "hello" + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._get_query_embedding" + ) + mock_openai_client.return_value = MOCK_EMBEDDING_DATA + embedding = upstage_embedding.get_query_embedding(query) assert isinstance(embedding, list) -async def test_upstage_embedding_async_query_embedding(upstage_embedding): +async def test_upstage_embedding_async_query_embedding( + mocker: MockerFixture, setup_environment, upstage_embedding +): query = "hello" + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._aget_query_embedding" + ) + mock_openai_client.return_value = MOCK_EMBEDDING_DATA + embedding = await upstage_embedding.aget_query_embedding(query) assert isinstance(embedding, list) -def test_upstage_embedding_text_embedding(upstage_embedding): +def test_upstage_embedding_text_embedding( + mocker: MockerFixture, setup_environment, upstage_embedding +): text = "hello" + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._get_text_embedding" + ) + mock_openai_client.return_value = MOCK_EMBEDDING_DATA + embedding = upstage_embedding.get_text_embedding(text) assert isinstance(embedding, list) -async def 
test_upstage_embedding_async_text_embedding(upstage_embedding): +async def test_upstage_embedding_async_text_embedding( + mocker: MockerFixture, setup_environment, upstage_embedding +): text = "hello" + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._aget_text_embedding" + ) + mock_openai_client.return_value = MOCK_EMBEDDING_DATA + embedding = await upstage_embedding.aget_text_embedding(text) assert isinstance(embedding, list) -def test_upstage_embedding_text_embeddings(upstage_embedding): +def test_upstage_embedding_text_embeddings( + mocker: MockerFixture, setup_environment, upstage_embedding +): texts = ["hello", "world"] + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._get_text_embeddings" + ) + mock_openai_client.return_value = [MOCK_EMBEDDING_DATA] * len(texts) + embeddings = upstage_embedding.get_text_embedding_batch(texts) assert isinstance(embeddings, list) assert len(embeddings) == len(texts) assert all(isinstance(embedding, list) for embedding in embeddings) -def test_upstage_embedding_text_embeddings_fail_large_batch(): +def test_upstage_embedding_text_embeddings_fail_large_batch( + mocker: MockerFixture, setup_environment +): + large_batch_size = 2049 UpstageEmbedding = pytest.importorskip( "llama_index.embeddings.upstage", reason="Cannot import UpstageEmbedding" ).UpstageEmbedding - texts = ["hello"] * 2049 + + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._get_text_embeddings" + ) + mock_openai_client.return_value = [MOCK_EMBEDDING_DATA] * large_batch_size + + texts = ["hello"] * large_batch_size with pytest.raises(ValueError): upstage_embedding = UpstageEmbedding(embed_batch_size=2049) upstage_embedding.get_text_embedding_batch(texts) -async def test_upstage_embedding_async_text_embeddings(upstage_embedding): +async def test_upstage_embedding_async_text_embeddings( + mocker: MockerFixture, setup_environment, upstage_embedding +): texts = ["hello", "world"] + mock_openai_client = mocker.patch( + "llama_index.embeddings.upstage.base.UpstageEmbedding._aget_text_embeddings" + ) + mock_openai_client.return_value = [MOCK_EMBEDDING_DATA] * len(texts) + embeddings = await upstage_embedding.aget_text_embedding_batch(texts) assert isinstance(embeddings, list) assert len(embeddings) == len(texts) diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/unit_tests/test_embeddings_upstage.py b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/unit_tests/test_embeddings_upstage.py index 0e5e64f84bb31..f73d68cda1d8c 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/unit_tests/test_embeddings_upstage.py +++ b/llama-index-integrations/embeddings/llama-index-embeddings-upstage/tests/unit_tests/test_embeddings_upstage.py @@ -1,6 +1,8 @@ import pytest from llama_index.core.base.embeddings.base import BaseEmbedding +UPSTAGE_TEST_API_KEY = "upstage_test_key" + @pytest.fixture() def upstage_embedding(): @@ -9,6 +11,11 @@ def upstage_embedding(): ).UpstageEmbedding +@pytest.fixture() +def setup_environment(monkeypatch): + monkeypatch.setenv("UPSTAGE_API_KEY", UPSTAGE_TEST_API_KEY) + + def test_upstage_embedding_class(upstage_embedding): names_of_base_classes = [b.__name__ for b in upstage_embedding.__mro__] assert BaseEmbedding.__name__ in names_of_base_classes @@ -20,11 +27,15 @@ def test_upstage_embedding_fail_wrong_model(upstage_embedding): def 
test_upstage_embedding_api_key_alias(upstage_embedding): - api_key = "test_key" - embedding1 = upstage_embedding(api_key=api_key) - embedding2 = upstage_embedding(upstage_api_key=api_key) - embedding3 = upstage_embedding(error_api_key=api_key) + embedding1 = upstage_embedding(api_key=UPSTAGE_TEST_API_KEY) + embedding2 = upstage_embedding(upstage_api_key=UPSTAGE_TEST_API_KEY) + embedding3 = upstage_embedding(error_api_key=UPSTAGE_TEST_API_KEY) - assert embedding1.api_key == api_key - assert embedding2.api_key == api_key + assert embedding1.api_key == UPSTAGE_TEST_API_KEY + assert embedding2.api_key == UPSTAGE_TEST_API_KEY assert embedding3.api_key == "" + + +def test_upstage_embedding_api_key_with_env(setup_environment, upstage_embedding): + embedding = upstage_embedding() + assert embedding.api_key == UPSTAGE_TEST_API_KEY diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/llama_index/graph_stores/falkordb/falkordb_property_graph.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/llama_index/graph_stores/falkordb/falkordb_property_graph.py index cf163a319e48e..fb901116ffec7 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/llama_index/graph_stores/falkordb/falkordb_property_graph.py +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/llama_index/graph_stores/falkordb/falkordb_property_graph.py @@ -144,14 +144,14 @@ def refresh_schema(self) -> None: param_map={"EXCLUDED_LABELS": [*EXCLUDED_LABELS, BASE_ENTITY_LABEL]}, ) node_properties = ( - [el[b"output"] for el in node_query_results] if node_query_results else [] + [el["output"] for el in node_query_results] if node_query_results else [] ) rels_query_result = self.structured_query( rel_properties_query, param_map={"EXCLUDED_LABELS": EXCLUDED_RELS} ) rel_properties = ( - [el[b"output"] for el in rels_query_result] if rels_query_result else [] + [el["output"] for el in rels_query_result] if rels_query_result else [] ) rel_objs_query_result = self.structured_query( @@ -159,7 +159,7 @@ def refresh_schema(self) -> None: param_map={"EXCLUDED_LABELS": [*EXCLUDED_LABELS, BASE_ENTITY_LABEL]}, ) relationships = ( - [el[b"output"] for el in rel_objs_query_result] + [el["output"] for el in rel_objs_query_result] if rel_objs_query_result else [] ) @@ -251,7 +251,7 @@ def upsert_relations(self, relations: List[Relation]) -> None: MERGE (target {{id: $data.target_id}}) ON CREATE SET target:Chunk WITH source, target - CREATE (source)-[r:{param["label"]}]->(target) + CREATE (source)-[r:`{param["label"]}`]->(target) SET r += $data.properties RETURN count(*) """, @@ -296,21 +296,21 @@ def get( for record in response: # text indicates a chunk node # none on the type indicates an implicit node, likely a chunk node - if "text" in record[b"properties"] or record[b"type"] is None: - text = record[b"properties"].pop("text", "") + if "text" in record["properties"] or record["type"] is None: + text = record["properties"].pop("text", "") nodes.append( ChunkNode( - id_=record[b"name"], + id_=record["name"], text=text, - properties=remove_empty_values(record[b"properties"]), + properties=remove_empty_values(record["properties"]), ) ) else: nodes.append( EntityNode( - name=record[b"name"], - label=record[b"type"], - properties=remove_empty_values(record[b"properties"]), + name=record["name"], + label=record["type"], + properties=remove_empty_values(record["properties"]), ) ) @@ -373,19 +373,19 @@ def get_triplets( triples = [] for record in data: 
source = EntityNode( - name=record[b"source_id"], - label=record[b"source_type"], - properties=remove_empty_values(record[b"source_properties"]), + name=record["source_id"], + label=record["source_type"], + properties=remove_empty_values(record["source_properties"]), ) target = EntityNode( - name=record[b"target_id"], - label=record[b"target_type"], - properties=remove_empty_values(record[b"target_properties"]), + name=record["target_id"], + label=record["target_type"], + properties=remove_empty_values(record["target_properties"]), ) rel = Relation( - source_id=record[b"source_id"], - target_id=record[b"target_id"], - label=record[b"type"], + source_id=record["source_id"], + target_id=record["target_id"], + label=record["type"], ) triples.append([source, rel, target]) return triples @@ -432,23 +432,23 @@ def get_rel_map( ignore_rels = ignore_rels or [] for record in response: - if record[b"type"] in ignore_rels: + if record["type"] in ignore_rels: continue source = EntityNode( - name=record[b"source_id"], - label=record[b"source_type"], - properties=remove_empty_values(record[b"source_properties"]), + name=record["source_id"], + label=record["source_type"], + properties=remove_empty_values(record["source_properties"]), ) target = EntityNode( - name=record[b"target_id"], - label=record[b"target_type"], - properties=remove_empty_values(record[b"target_properties"]), + name=record["target_id"], + label=record["target_type"], + properties=remove_empty_values(record["target_properties"]), ) rel = Relation( - source_id=record[b"source_id"], - target_id=record[b"target_id"], - label=record[b"type"], + source_id=record["source_id"], + target_id=record["target_id"], + label=record["type"], ) triples.append([source, rel, target]) @@ -505,12 +505,12 @@ def vector_query( scores = [] for record in data: node = EntityNode( - name=record[b"name"], - label=record[b"type"], - properties=remove_empty_values(record[b"properties"]), + name=record["name"], + label=record["type"], + properties=remove_empty_values(record["properties"]), ) nodes.append(node) - scores.append(record[b"score"]) + scores.append(record["score"]) return (nodes, scores) diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/pyproject.toml b/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/pyproject.toml index 26ebc55c134b4..cdbb9807d8077 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/pyproject.toml +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-falkordb/pyproject.toml @@ -13,6 +13,7 @@ import_path = "llama_index.graph_stores.falkordb" [tool.llamahub.class_authors] FalkorDBGraphStore = "llama-index" +FalkorDBPropertyGraphStore = "llama-index" [tool.mypy] disallow_untyped_defs = true @@ -27,11 +28,11 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-graph-stores-falkordb" readme = "README.md" -version = "0.2.0" +version = "0.2.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -falkordb = "^1.0.4" +falkordb = "^1.0.8" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/__init__.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/__init__.py index 9163b615593cd..b915e23fd1395 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/__init__.py +++ 
b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/__init__.py @@ -1,3 +1,6 @@ from llama_index.graph_stores.kuzu.base import KuzuGraphStore +from llama_index.graph_stores.kuzu.kuzu_property_graph import ( + KuzuPropertyGraphStore, +) -__all__ = ["KuzuGraphStore"] +__all__ = ["KuzuGraphStore", "KuzuPropertyGraphStore"] diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/kuzu_property_graph.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/kuzu_property_graph.py new file mode 100644 index 0000000000000..079fa6b6c81aa --- /dev/null +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/kuzu_property_graph.py @@ -0,0 +1,555 @@ +from typing import Any, List, Dict, Optional, Tuple +import kuzu +from llama_index.core.graph_stores.types import ( + PropertyGraphStore, + Triplet, + LabelledNode, + Relation, + EntityNode, + ChunkNode, +) +from llama_index.core.vector_stores.types import VectorStoreQuery +from llama_index.core.graph_stores.utils import value_sanitize +import llama_index.graph_stores.kuzu.utils as utils + +# Threshold for max number of returned triplets +LIMIT = 100 +Triple = Tuple[str, str, str] + + +class KuzuPropertyGraphStore(PropertyGraphStore): + """ + Kùzu Property Graph Store. + + This class implements a Kùzu property graph store. + + Kùzu can be installed and used with this simple command: + + ``` + pip install kuzu + ``` + """ + + def __init__( + self, + db: kuzu.Database, + relationship_schema: Optional[List[Tuple[str, str, str]]] = None, + has_structured_schema: Optional[bool] = False, + sanitize_query_output: Optional[bool] = True, + ) -> None: + self.db = db + self.connection = kuzu.Connection(self.db) + + if has_structured_schema: + if relationship_schema is None: + raise ValueError( + "Please provide a relationship schema if has_structured_schema=True."
+ ) + else: + self.validate_relationship_schema(relationship_schema) + else: + # Use a generic schema with node types of 'Entity' if no schema is required + relationship_schema = [("Entity", "LINKS", "Entity")] + + self.relationship_schema = relationship_schema + self.entities = self.get_entities() + self.has_structured_schema = has_structured_schema + self.entities.extend( + ["Chunk"] + ) # Always include Chunk as an entity type, in all schemas + self.sanitize_query_output = sanitize_query_output + self.structured_schema = {} + self.init_schema() + + def init_schema(self) -> None: + """Initialize schema if the required tables do not exist.""" + utils.create_chunk_node_table(self.connection) + utils.create_entity_node_tables(self.connection, entities=self.entities) + utils.create_relation_tables( + self.connection, + self.entities, + relationship_schema=self.relationship_schema, + ) + + def validate_relationship_schema(self, relationship_schema: List[Triple]) -> None: + # Check that validation schema is a list of tuples as required by Kùzu for relationships + if not all(isinstance(item, tuple) for item in relationship_schema): + raise ValueError( + "Please specify the relationship schema as " + "a list of tuples, for example: [('PERSON', 'IS_CEO_OF', 'ORGANIZATION')]" + ) + + @property + def client(self) -> kuzu.Connection: + return self.connection + + def get_entities(self) -> List[str]: + return sorted( + set( + [rel[0] for rel in self.relationship_schema] + + [rel[2] for rel in self.relationship_schema] + ) + ) + + def upsert_nodes(self, nodes: List[LabelledNode]) -> None: + entity_list: List[EntityNode] = [] + chunk_list: List[ChunkNode] = [] + node_tables = self.connection._get_node_table_names() + + for item in nodes: + if isinstance(item, EntityNode): + entity_list.append(item) + elif isinstance(item, ChunkNode): + chunk_list.append(item) + + for chunk in chunk_list: + upsert_chunk_node_query = """ + MERGE (c:Chunk {id: $id}) + SET c.text = $text, + c.label = $label, + c.embedding = $embedding, + c.ref_doc_id = $ref_doc_id, + c.creation_date = date($creation_date), + c.last_modified_date = date($last_modified_date), + c.file_name = $file_name, + c.file_path = $file_path, + c.file_size = $file_size, + c.file_type = $file_type + """ + + self.connection.execute( + upsert_chunk_node_query, + parameters={ + "id": chunk.id_, + "text": chunk.text.strip(), + "label": chunk.label, + "embedding": chunk.embedding, + "ref_doc_id": chunk.properties.get("ref_doc_id"), + "creation_date": chunk.properties.get("creation_date"), + "last_modified_date": chunk.properties.get("last_modified_date"), + "file_name": chunk.properties.get("file_name"), + "file_path": chunk.properties.get("file_path"), + "file_size": chunk.properties.get("file_size"), + "file_type": chunk.properties.get("file_type"), + }, + ) + + for entity in entity_list: + entity_label = entity.label if entity.label in node_tables else "Entity" + upsert_entity_node_query = f""" + MERGE (e:{entity_label} {{id: $id}}) + SET e.label = $label, + e.name = $name, + e.embedding = $embedding, + e.creation_date = date($creation_date), + e.last_modified_date = date($last_modified_date), + e.file_name = $file_name, + e.file_path = $file_path, + e.file_size = $file_size, + e.file_type = $file_type, + e.triplet_source_id = $triplet_source_id + """ + + self.connection.execute( + upsert_entity_node_query, + parameters={ + "id": entity.name, + "label": entity.label, + "name": entity.name, + "embedding": entity.embedding, + "creation_date": 
entity.properties.get("creation_date"), + "last_modified_date": entity.properties.get("last_modified_date"), + "file_name": entity.properties.get("file_name"), + "file_path": entity.properties.get("file_path"), + "file_size": entity.properties.get("file_size"), + "file_type": entity.properties.get("file_type"), + "triplet_source_id": entity.properties.get("triplet_source_id"), + }, + ) + + def upsert_relations(self, relations: List[Relation]) -> None: + for rel in relations: + if self.has_structured_schema: + src, _, dst = utils.lookup_relation(rel.label, self.relationship_schema) + else: + src, dst = "Entity", "Entity" + + rel_tbl_name = f"LINKS_{src}_{dst}" + # Connect entities to each other + self.connection.execute( + f""" + MATCH (a:{src} {{id: $source_id}}), + (b:{dst} {{id: $target_id}}) + MERGE (a)-[r:{rel_tbl_name} {{label: $label}}]->(b) + SET r.triplet_source_id = $triplet_source_id + """, + parameters={ + "source_id": rel.source_id, + "target_id": rel.target_id, + "triplet_source_id": rel.properties.get("triplet_source_id"), + "label": rel.label, + }, + ) + # Connect chunks to entities + self.connection.execute( + f""" + MATCH (a:{src} {{id: $source_id}}), + (b:{dst} {{id: $target_id}}), + (c:Chunk {{id: $triplet_source_id}}) + MERGE (c)-[:LINKS_Chunk_{src} {{label: "MENTIONS"}}]->(a) + MERGE (c)-[:LINKS_Chunk_{dst} {{label: "MENTIONS"}}]->(b) + """, + parameters={ + "source_id": rel.source_id, + "target_id": rel.target_id, + "triplet_source_id": rel.properties.get("triplet_source_id"), + }, + ) + + def structured_query( + self, query: str, param_map: Optional[Dict[str, Any]] = None + ) -> Any: + response = self.connection.execute(query, parameters=param_map) + column_names = response.get_column_names() + result = [] + while response.has_next(): + row = response.get_next() + result.append(dict(zip(column_names, row))) + + if self.sanitize_query_output: + return value_sanitize(result) + + return result + + def vector_query( + self, query: VectorStoreQuery, **kwargs: Any + ) -> Tuple[List[LabelledNode], List[float]]: + raise NotImplementedError( + "Vector query is not currently implemented for KuzuPropertyGraphStore." 
+ ) + + def get( + self, + properties: Optional[dict] = None, + ids: Optional[List[str]] = None, + ) -> List[LabelledNode]: + """Get nodes from the property graph store.""" + cypher_statement = "MATCH (e) " + + parameters = {} + if ids: + cypher_statement += "WHERE e.id in $ids " + parameters["ids"] = ids + + return_statement = "RETURN e.*" + cypher_statement += return_statement + result = self.structured_query(cypher_statement, param_map=parameters) + result = result if result else [] + + nodes = [] + for record in result: + # Text indicates a chunk node + # None on the label indicates an implicit node, likely a chunk node + if record.get("e.label") == "text_chunk": + properties = { + k: v for k, v in record.items() if k not in ["e.id", "e.text"] + } + text = record.get("e.text") + nodes.append( + ChunkNode( + id_=record["e.id"], + text=text, + properties=utils.remove_empty_values(properties), + ) + ) + else: + properties = { + k: v for k, v in record.items() if k not in ["e.id", "e.name"] + } + name = record["e.name"] if record.get("e.name") else record["e.id"] + label = record["e.label"] if record.get("e.label") else "Chunk" + nodes.append( + EntityNode( + name=name, + label=label, + properties=utils.remove_empty_values(properties), + ) + ) + return nodes + + def get_triplets( + self, + entity_names: Optional[List[str]] = None, + relation_names: Optional[List[str]] = None, + ids: Optional[List[str]] = None, + ) -> List[Triplet]: + # Construct the Cypher query + cypher_statement = "MATCH (e)-[r]->(t) " + + params = {} + if entity_names or relation_names or ids: + cypher_statement += "WHERE " + + if entity_names: + cypher_statement += "e.name in $entity_names " + params["entity_names"] = entity_names + + if relation_names and entity_names: + cypher_statement += "AND " + if relation_names: + cypher_statement += "r.label in $relation_names " + params["relation_names"] = relation_names + + if ids: + # Join with AND when other filters are already in the WHERE clause + if entity_names or relation_names: + cypher_statement += "AND " + cypher_statement += "e.id in $ids " + params["ids"] = ids + + # Avoid returning a massive list of triplets that represent a large portion of the graph + # This uses the LIMIT constant defined at the top of the file + if not (entity_names or relation_names or ids): + return_statement = f"WHERE e.label <> 'text_chunk' RETURN * LIMIT {LIMIT};" + else: + return_statement = f"AND e.label <> 'text_chunk' RETURN * LIMIT {LIMIT};" + + cypher_statement += return_statement + + result = self.structured_query(cypher_statement, param_map=params) + result = result if result else [] + + triples = [] + for record in result: + if record["e"]["_label"] == "Chunk": + continue + + src_table = record["e"]["_id"]["table"] + dst_table = record["t"]["_id"]["table"] + id_map = {src_table: record["e"]["id"], dst_table: record["t"]["id"]} + source = EntityNode( + name=record["e"]["id"], + label=record["e"]["_label"], + properties=utils.get_filtered_props(record["e"], ["_id", "_label"]), + ) + target = EntityNode( + name=record["t"]["id"], + label=record["t"]["_label"], + properties=utils.get_filtered_props(record["t"], ["_id", "_label"]), + ) + rel = Relation( + source_id=id_map.get(record["r"]["_src"]["table"], "unknown"), + target_id=id_map.get(record["r"]["_dst"]["table"], "unknown"), + label=record["r"]["label"], + ) + triples.append([source, rel, target]) + return triples + + def get_rel_map( + self, + graph_nodes: List[LabelledNode], + depth: int = 2, + limit: int = 30, + ignore_rels: Optional[List[str]] = None, + ) -> List[Triplet]: + triples = [] + + ids = [node.id for node in graph_nodes] + if len(ids) > 0: + # 
Run recursive query + response = self.structured_query( + f""" + MATCH (e) + WHERE e.id IN $ids + MATCH (e)-[rel*1..{depth} (r, n | WHERE r.label <> "MENTIONS") ]->(other) + RETURN * + LIMIT {limit}; + """, + param_map={"ids": ids}, + ) + else: + response = self.structured_query( + f""" + MATCH (e) + MATCH (e)-[rel*1..{depth} (r, n | WHERE r.label <> "MENTIONS") ]->(other) + RETURN * + LIMIT {limit}; + """ + ) + + ignore_rels = ignore_rels or [] + for record in response: + for item in record["rel"]["_rels"]: + if item["label"] in ignore_rels: + continue + + # Map each endpoint's internal table id to its node id + src_table = item["_src"]["table"] + dst_table = item["_dst"]["table"] + id_map = { + src_table: record["e"]["id"], + dst_table: record["other"]["id"], + } + source = EntityNode( + name=record["e"]["name"], + label=record["e"]["_label"], + properties=utils.get_filtered_props( + record["e"], ["_id", "name", "_label"] + ), + ) + target = EntityNode( + name=record["other"]["name"], + label=record["other"]["_label"], + properties=utils.get_filtered_props( + record["other"], ["_id", "name", "_label"] + ), + ) + rel = Relation( + source_id=id_map.get(item["_src"]["table"], "unknown"), + target_id=id_map.get(item["_dst"]["table"], "unknown"), + label=item["label"], + ) + triples.append([source, rel, target]) + + return triples + + def delete( + self, + entity_names: Optional[List[str]] = None, + relation_names: Optional[List[str]] = None, + properties: Optional[dict] = None, + ids: Optional[List[str]] = None, + ) -> None: + """Delete nodes and relationships from the property graph store.""" + if entity_names: + self.structured_query( + "MATCH (n) WHERE n.name IN $entity_names DETACH DELETE n", + param_map={"entity_names": entity_names}, + ) + + if ids: + self.structured_query( + "MATCH (n) WHERE n.id IN $ids DETACH DELETE n", + param_map={"ids": ids}, + ) + + if relation_names: + for rel in relation_names: + src, _, dst = utils.lookup_relation(rel, self.relationship_schema) + self.structured_query( + f""" + MATCH (:{src})-[r {{label: $label}}]->(:{dst}) + DELETE r + """, + param_map={"label": rel}, + ) + + if properties: + assert isinstance( + properties, dict + ), "`properties` should be a key-value mapping." + cypher = "MATCH (e) WHERE " + prop_list = [] + params = {} + for i, prop in enumerate(properties): + prop_list.append(f"e.`{prop}` = $property_{i}") + params[f"property_{i}"] = properties[prop] + cypher += " AND ".join(prop_list) + self.structured_query(cypher + " DETACH DELETE e", param_map=params) + + def get_schema(self) -> Any: + """ + Returns a structured schema of the property graph store. + + The schema contains `node_props`, `rel_props`, and `relationships` keys and + the associated metadata.
+ Example output: + { + 'node_props': {'Chunk': [{'property': 'id', 'type': 'STRING'}, + {'property': 'text', 'type': 'STRING'}, + {'property': 'label', 'type': 'STRING'}, + {'property': 'embedding', 'type': 'DOUBLE'}, + {'property': 'properties', 'type': 'STRING'}, + {'property': 'ref_doc_id', 'type': 'STRING'}], + 'Entity': [{'property': 'id', 'type': 'STRING'}, + {'property': 'name', 'type': 'STRING'}, + {'property': 'label', 'type': 'STRING'}, + {'property': 'embedding', 'type': 'DOUBLE'}, + {'property': 'properties', 'type': 'STRING'}]}, + 'rel_props': {'SOURCE': [{'property': 'label', 'type': 'STRING'}]}, + 'relationships': [{'end': 'Chunk', 'start': 'Chunk', 'type': 'SOURCE'}] + } + """ + current_table_schema = {"node_props": {}, "rel_props": {}, "relationships": []} + node_tables = self.connection._get_node_table_names() + for table_name in node_tables: + node_props = self.connection._get_node_property_names(table_name) + current_table_schema["node_props"][table_name] = [] + for prop, attr in node_props.items(): + schema = {} + schema["property"] = prop + schema["type"] = attr["type"] + current_table_schema["node_props"][table_name].append(schema) + + rel_tables = self.connection._get_rel_table_names() + for i, table in enumerate(rel_tables): + table_name = table["name"] + prop_values = self.connection.execute( + f"MATCH ()-[r:{table_name}]->() RETURN distinct r.label AS label;" + ) + while prop_values.has_next(): + rel_label = prop_values.get_next()[0] + src, dst = rel_tables[i]["src"], rel_tables[i]["dst"] + current_table_schema["relationships"].append( + {"start": src, "type": rel_label, "end": dst} + ) + current_table_schema["rel_props"][rel_label] = [] + table_details = self.connection.execute( + f"CALL TABLE_INFO('{table_name}') RETURN *;" + ) + while table_details.has_next(): + props = table_details.get_next() + rel_props = {} + rel_props["property"] = props[1] + rel_props["type"] = props[2] + current_table_schema["rel_props"][rel_label].append(rel_props) + + self.structured_schema = current_table_schema + + return self.structured_schema + + def get_schema_str(self) -> str: + schema = self.get_schema() + + formatted_node_props = [] + formatted_rel_props = [] + + # Format node properties + for label, props in schema["node_props"].items(): + props_str = ", ".join( + [f"{prop['property']}: {prop['type']}" for prop in props] + ) + formatted_node_props.append(f"{label} {{{props_str}}}") + + # Format relationship properties + for type, props in schema["rel_props"].items(): + props_str = ", ".join( + [f"{prop['property']}: {prop['type']}" for prop in props] + ) + formatted_rel_props.append(f"{type} {{{props_str}}}") + + # Format relationships + formatted_rels = [ + f"(:{rel['start']})-[:{rel['type']}]->(:{rel['end']})" + for rel in schema["relationships"] + ] + + return "\n".join( + [ + "Node properties:", + "\n".join(formatted_node_props), + "Relationship properties:", + "\n".join(formatted_rel_props), + "The relationships:", + "\n".join(formatted_rels), + ] + ) + + +KuzuPGStore = KuzuPropertyGraphStore diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/utils.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/utils.py new file mode 100644 index 0000000000000..e3e8317b35937 --- /dev/null +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/llama_index/graph_stores/kuzu/utils.py @@ -0,0 +1,129 @@ +from typing import List, _LiteralGenericAlias, 
get_args, Tuple +import kuzu + +Triple = Tuple[str, str, str] + + +def create_fresh_database(db: str) -> None: + """ + Create a new Kùzu database by removing existing database directory and its contents. + """ + import shutil + + shutil.rmtree(db, ignore_errors=True) + + +def get_list_from_literal(literal: _LiteralGenericAlias) -> List[str]: + """ + Get a list of strings from a Literal type. + + Parameters: + literal (_LiteralGenericAlias): The Literal type from which to extract the strings. + + Returns: + List[str]: A list of strings extracted from the Literal type. + """ + if not isinstance(literal, _LiteralGenericAlias): + raise TypeError( + f"{literal} must be a Literal type.\nTry using typing.Literal{literal}." + ) + return list(get_args(literal)) + + +def remove_empty_values(input_dict): + """ + Remove entries with empty values from the dictionary. + + Parameters: + input_dict (dict): The dictionary from which empty values need to be removed. + + Returns: + dict: A new dictionary with all empty values removed. + """ + # Create a new dictionary excluding empty values and remove the `e.` prefix from the keys + return {key.replace("e.", ""): value for key, value in input_dict.items() if value} + + +def get_filtered_props(records: dict, filter_list: List[str]) -> dict: + return {k: v for k, v in records.items() if k not in filter_list} + + +# Lookup entry by middle value of tuple +def lookup_relation(relation: str, triples: List[Triple]) -> Triple: + """ + Look up a triple in a list of triples by the middle value. + """ + for triple in triples: + if triple[1] == relation: + return triple + return None + + +def create_chunk_node_table(connection: kuzu.Connection) -> None: + # For now, the additional `properties` dict from LlamaIndex is stored as a string + # TODO: See if it makes sense to add better support for property metadata as columns + if "Chunk" not in connection._get_node_table_names(): + connection.execute( + f""" + CREATE NODE TABLE Chunk ( + id STRING, + text STRING, + label STRING, + embedding DOUBLE[], + creation_date DATE, + last_modified_date DATE, + file_name STRING, + file_path STRING, + file_size INT64, + file_type STRING, + ref_doc_id STRING, + PRIMARY KEY(id) + ) + """ + ) + + +def create_entity_node_tables(connection: kuzu.Connection, entities: List[str]) -> None: + for tbl_name in entities: + # For now, the additional `properties` dict from LlamaIndex is stored as a string + # TODO: See if it makes sense to add better support for property metadata as columns + if tbl_name not in connection._get_node_table_names(): + connection.execute( + f""" + CREATE NODE TABLE {tbl_name} ( + id STRING, + name STRING, + label STRING, + embedding DOUBLE[], + creation_date DATE, + last_modified_date DATE, + file_name STRING, + file_path STRING, + file_size INT64, + file_type STRING, + triplet_source_id STRING, + PRIMARY KEY(id) + ) + """ + ) + + +def create_relation_tables( + connection: kuzu.Connection, entities: List[str], relationship_schema: List[Triple] +) -> None: + rel_tables = [tbl["name"] for tbl in connection._get_rel_table_names()] + # We use Kùzu relationship table group creation DDL commands to create relationship tables + ddl = "" + if not any("LINKS" in table for table in rel_tables): + ddl = "CREATE REL TABLE GROUP LINKS (" + table_names = [] + for src, _, dst in relationship_schema: + table_names.append(f"FROM {src} TO {dst}") + for entity in entities: + table_names.append(f"FROM Chunk TO {entity}") + table_names = list(set(table_names)) + ddl += ", 
".join(table_names) + # Add common properties for all the tables here + ddl += ", label STRING, triplet_source_id STRING)" + if ddl: + connection.execute(ddl) diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/pyproject.toml b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/pyproject.toml index b2a5d3426fc59..d01490cf16b7a 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/pyproject.toml +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/pyproject.toml @@ -13,6 +13,7 @@ import_path = "llama_index.graph_stores.kuzu" [tool.llamahub.class_authors] KuzuGraphStore = "llama-index" +KuzuPropertyGraphStore = "llama-index" [tool.mypy] disallow_untyped_defs = true @@ -27,11 +28,11 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-graph-stores-kuzu" readme = "README.md" -version = "0.2.0" +version = "0.3.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -kuzu = "^0.4.0" +kuzu = "^0.6.0" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/tests/test_pg_stores_kuzu.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/tests/test_pg_stores_kuzu.py new file mode 100644 index 0000000000000..3b449206c87e7 --- /dev/null +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-kuzu/tests/test_pg_stores_kuzu.py @@ -0,0 +1,84 @@ +import shutil +import pytest + +from llama_index.graph_stores.kuzu import KuzuPropertyGraphStore +from llama_index.core.graph_stores.types import Relation, EntityNode +from llama_index.core.schema import TextNode + + +@pytest.fixture() +def pg_store() -> KuzuPropertyGraphStore: + import kuzu + + shutil.rmtree("llama_test_db", ignore_errors=True) + db = kuzu.Database("llama_test_db") + pg_store = KuzuPropertyGraphStore(db) + pg_store.structured_query("MATCH (n) DETACH DELETE n") + return pg_store + + +def test_kuzudb_pg_store(pg_store: KuzuPropertyGraphStore) -> None: + # Create a two entity nodes + entity1 = EntityNode(label="PERSON", name="Logan") + entity2 = EntityNode(label="ORGANIZATION", name="LlamaIndex") + + # Create a relation + relation = Relation( + label="WORKS_FOR", + source_id=entity1.id, + target_id=entity2.id, + ) + + pg_store.upsert_nodes([entity1, entity2]) + pg_store.upsert_relations([relation]) + + source_node = TextNode(text="Logan (age 28), works for LlamaIndex since 2023.") + relations = [ + Relation( + label="MENTIONS", + target_id=entity1.id, + source_id=source_node.node_id, + ), + Relation( + label="MENTIONS", + target_id=entity2.id, + source_id=source_node.node_id, + ), + ] + + pg_store.upsert_llama_nodes([source_node]) + pg_store.upsert_relations(relations) + + print(pg_store.get()) + + kg_nodes = pg_store.get(ids=[entity1.id]) + assert len(kg_nodes) == 1 + assert kg_nodes[0].label == "PERSON" + assert kg_nodes[0].name == "Logan" + + # get paths from a node + paths = pg_store.get_rel_map(kg_nodes, depth=1) + for path in paths: + assert path[0].id == entity1.id + assert path[2].id == entity2.id + assert path[1].id == relation.id + + query = "match (n:Entity) return n" + result = pg_store.structured_query(query) + assert len(result) == 2 + + # deleting + # delete our entities + pg_store.delete(ids=[entity1.id, entity2.id]) + + # delete our text nodes + pg_store.delete(ids=[source_node.node_id]) + + nodes = pg_store.get(ids=[entity1.id, entity2.id]) + assert len(nodes) == 0 + + text_nodes = 
pg_store.get_llama_nodes([source_node.node_id]) + assert len(text_nodes) == 0 + + # Delete the database + shutil.rmtree("llama_test_db", ignore_errors=True) diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/base.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/base.py index c1ca13b105600..db6b31ef7b0d8 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/base.py +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/base.py @@ -249,7 +249,33 @@ def get_schema(self, refresh: bool = False) -> str: logger.debug(f"get_schema() schema:\n{self.schema}") return self.schema - def query(self, query: str, param_map: Optional[Dict[str, Any]] = {}) -> Any: - with self._driver.session(database=self._database) as session: - result = session.run(query, param_map) - return [d.data() for d in result] + def query(self, query: str, param_map: Optional[Dict[str, Any]] = None) -> Any: + param_map = param_map or {} + try: + data, _, _ = self._driver.execute_query( + query, database=self._database, parameters_=param_map + ) + return [r.data() for r in data] + except neo4j.exceptions.Neo4jError as e: + if not ( + ( + ( # isCallInTransactionError + e.code == "Neo.DatabaseError.Statement.ExecutionFailed" + or e.code + == "Neo.DatabaseError.Transaction.TransactionStartFailed" + ) + and "in an implicit transaction" in e.message + ) + or ( # isPeriodicCommitError + e.code == "Neo.ClientError.Statement.SemanticError" + and ( + "in an open transaction is not possible" in e.message + or "tried to execute in an explicit transaction" in e.message + ) + ) + ): + raise + # Fallback to allow implicit transactions + with self._driver.session() as session: + data = session.run(neo4j.Query(text=query), param_map) + return [r.data() for r in data] diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/neo4j_property_graph.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/neo4j_property_graph.py index bebc0d4ad7f46..1c71c16ab807d 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/neo4j_property_graph.py +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/llama_index/graph_stores/neo4j/neo4j_property_graph.py @@ -40,6 +40,7 @@ def remove_empty_values(input_dict): # Threshold for returning all available prop values in graph schema DISTINCT_VALUE_LIMIT = 10 CHUNK_SIZE = 1000 +VECTOR_INDEX_NAME = "entity" node_properties_query = """ CALL apoc.meta.data() @@ -71,6 +72,19 @@ def remove_empty_values(input_dict): """ +def convert_operator(operator: str) -> str: + # @Todo add custom mapping for any/all + mapping = {} + mapping["=="] = "=" + mapping["!="] = "<>" + mapping["nin"] = "in" + + try: + return mapping[operator] + except KeyError: + return operator + + class Neo4jPropertyGraphStore(PropertyGraphStore): r""" Neo4j Property Graph Store. 
@@ -162,6 +176,13 @@ def __init__( f"""CREATE CONSTRAINT IF NOT EXISTS FOR (n:`{BASE_ENTITY_LABEL}`) REQUIRE n.id IS UNIQUE;""" ) + # Verify version to check if we can use vector index + self.verify_version() + if self._supports_vector_index: + self.structured_query( + f"CREATE VECTOR INDEX {VECTOR_INDEX_NAME} IF NOT EXISTS " + "FOR (m:__Entity__) ON m.embedding" + ) @property def client(self): @@ -558,49 +579,96 @@ def get_rel_map( return triples def structured_query( - self, query: str, param_map: Optional[Dict[str, Any]] = None + self, + query: str, + param_map: Optional[Dict[str, Any]] = None, ) -> Any: param_map = param_map or {} + try: + data, _, _ = self._driver.execute_query( + query, database=self._database, parameters_=param_map + ) + full_result = [d.data() for d in data] + + if self.sanitize_query_output: + return [value_sanitize(el) for el in full_result] + return full_result + except neo4j.exceptions.Neo4jError as e: + if not ( + ( + ( # isCallInTransactionError + e.code == "Neo.DatabaseError.Statement.ExecutionFailed" + or e.code + == "Neo.DatabaseError.Transaction.TransactionStartFailed" + ) + and "in an implicit transaction" in e.message + ) + or ( # isPeriodicCommitError + e.code == "Neo.ClientError.Statement.SemanticError" + and ( + "in an open transaction is not possible" in e.message + or "tried to execute in an explicit transaction" in e.message + ) + ) + ): + raise + # Fallback to allow implicit transactions + with self._driver.session() as session: + data = session.run(neo4j.Query(text=query), param_map) + full_result = [d.data() for d in data] - with self._driver.session(database=self._database) as session: - result = session.run(query, param_map) - full_result = [d.data() for d in result] - - if self.sanitize_query_output: - return [value_sanitize(el) for el in full_result] - return full_result + if self.sanitize_query_output: + return [value_sanitize(el) for el in full_result] + return full_result def vector_query( self, query: VectorStoreQuery, **kwargs: Any ) -> Tuple[List[LabelledNode], List[float]]: """Query the graph store with a vector store query.""" - conditions = None + conditions = [] + filter_params = {} if query.filters: - conditions = [ - f"e.{filter.key} {filter.operator.value} {filter.value}" - for filter in query.filters.filters - ] + for index, filter in enumerate(query.filters.filters): + conditions.append( + f"{'NOT' if filter.operator.value in ['nin'] else ''} e.`{filter.key}` " + f"{convert_operator(filter.operator.value)} $param_{index}" + ) + filter_params[f"param_{index}"] = filter.value filters = ( - f" {query.filters.condition.value} ".join(conditions).replace("==", "=") - if conditions is not None + f" {query.filters.condition.value} ".join(conditions) + if conditions else "1 = 1" ) - - data = self.structured_query( - f"""MATCH (e:`{BASE_ENTITY_LABEL}`) - WHERE e.embedding IS NOT NULL AND size(e.embedding) = $dimension AND ({filters}) - WITH e, vector.similarity.cosine(e.embedding, $embedding) AS score - ORDER BY score DESC LIMIT toInteger($limit) - RETURN e.id AS name, - [l in labels(e) WHERE NOT l IN ['{BASE_ENTITY_LABEL}', '{BASE_NODE_LABEL}'] | l][0] AS type, - e{{.* , embedding: Null, name: Null, id: Null}} AS properties, - score""", - param_map={ - "embedding": query.query_embedding, - "dimension": len(query.query_embedding), - "limit": query.similarity_top_k, - }, - ) + if not query.filters and self._supports_vector_index: + data = self.structured_query( + f"""CALL db.index.vector.queryNodes('{VECTOR_INDEX_NAME}', $limit, 
$embedding) + YIELD node, score RETURN node.id AS name, + [l in labels(node) WHERE NOT l IN ['{BASE_ENTITY_LABEL}', '{BASE_NODE_LABEL}'] | l][0] AS type, + node{{.* , embedding: Null, name: Null, id: Null}} AS properties, + score + """, + param_map={ + "embedding": query.query_embedding, + "limit": query.similarity_top_k, + }, + ) + else: + data = self.structured_query( + f"""MATCH (e:`{BASE_ENTITY_LABEL}`) + WHERE e.embedding IS NOT NULL AND size(e.embedding) = $dimension AND ({filters}) + WITH e, vector.similarity.cosine(e.embedding, $embedding) AS score + ORDER BY score DESC LIMIT toInteger($limit) + RETURN e.id AS name, + [l in labels(e) WHERE NOT l IN ['{BASE_ENTITY_LABEL}', '{BASE_NODE_LABEL}'] | l][0] AS type, + e{{.* , embedding: Null, name: Null, id: Null}} AS properties, + score""", + param_map={ + "embedding": query.query_embedding, + "dimension": len(query.query_embedding), + "limit": query.similarity_top_k, + **filter_params, + }, + ) data = data if data else [] nodes = [] @@ -930,5 +998,29 @@ def get_schema_str(self, refresh: bool = False) -> str: ] ) + def verify_version(self) -> None: + """ + Check if the connected Neo4j database version supports vector indexing + without specifying embedding dimension. + + Queries the Neo4j database to retrieve its version and compares it + against a target version (5.23.0) that is known to support vector + indexing, and sets self._supports_vector_index accordingly. + """ + db_data = self.structured_query("CALL dbms.components()") + version = db_data[0]["versions"][0] + if "aura" in version: + version_tuple = (*map(int, version.split("-")[0].split(".")), 0) + else: + version_tuple = tuple(map(int, version.split("."))) + + target_version = (5, 23, 0) + + if version_tuple >= target_version: + self._supports_vector_index = True + else: + self._supports_vector_index = False + Neo4jPGStore = Neo4jPropertyGraphStore diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml index 9bbd7ded033b2..ad522d116f876 100644 --- a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml +++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml @@ -28,7 +28,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-graph-stores-neo4j" readme = "README.md" -version = "0.3.0" +version = "0.3.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/README.md b/llama-index-integrations/indices/llama-index-indices-managed-vectara/README.md index 6569a5652846a..2db7ebaec9a59 100644 --- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/README.md +++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/README.md @@ -1 +1,74 @@ # LlamaIndex Managed Integration: Vectara + +The Vectara Index provides a simple implementation of Vectara's end-to-end RAG pipeline, +including data ingestion, document retrieval, reranking results, summary generation, and hallucination evaluation. + +## Setup + +First, make sure you have the latest LlamaIndex version installed. + +Next, install the Vectara Index: + +``` +pip install -U llama-index-indices-managed-vectara +``` + +Finally, set up your Vectara corpus. 
If you don't have a Vectara account, you can [sign up](https://vectara.com/integrations/llamaindex) and follow our [Quick Start](https://docs.vectara.com/docs/quickstart) guide to create a corpus and an API key (make sure it has both indexing and query permissions). + +## Usage + +First let's initialize the index with some sample documents. + +```python +import os + +os.environ["VECTARA_API_KEY"] = "" +os.environ["VECTARA_CORPUS_ID"] = "" +os.environ["VECTARA_CUSTOMER_ID"] = "" + +from llama_index.indices.managed.vectara import VectaraIndex +from llama_index.core.schema import Document + +docs = [ + Document( + text=""" + This is test text for Vectara integration with LlamaIndex. + Users should love their experience with this integration + """, + ), + Document( + text=""" + The Vectara index integration with LlamaIndex implements Vectara's RAG pipeline. + It can be used both as a retriever and query engine. + """, + ), +] + +index = VectaraIndex.from_documents(docs) +``` + +You can now use this index to retrieve documents. + +```python +# Retrieves the top search result +retriever = index.as_retriever(similarity_top_k=1) + +results = retriever.retrieve("How will users feel about this new tool?") +print(results[0]) +``` + +You can also use it as a query engine to get a generated summary from the retrieved results. + +```python +query_engine = index.as_query_engine() + +results = query_engine.query( + "Which company has partnered with Vectara to implement their RAG pipeline as an index?" +) +print(f"Generated summary: {results.response}\n") +print("Top sources:") +for node in results.source_nodes[:2]: + print(node) +``` + +If you want to see the full features and capabilities of `VectaraIndex`, check out this Jupyter [notebook](https://github.com/vectara/example-notebooks/blob/main/notebooks/using-vectara-with-llamaindex.ipynb). diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/__init__.py b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/__init__.py index bfeff0aa0d3a3..70e75e94507a2 100644 --- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/__init__.py +++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/__init__.py @@ -3,5 +3,11 @@ VectaraAutoRetriever, VectaraRetriever, ) +from llama_index.indices.managed.vectara.query import VectaraQueryEngine -__all__ = ["VectaraIndex", "VectaraRetriever", "VectaraAutoRetriever"] +__all__ = [ + "VectaraIndex", + "VectaraRetriever", + "VectaraAutoRetriever", + "VectaraQueryEngine", +] diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/base.py b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/base.py index 1dc6aa39f300e..46a95590ef673 100644 --- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/base.py +++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/base.py @@ -3,7 +3,6 @@ A managed Index - where the index is accessible via some API that interfaces a managed service. 
- """ import json @@ -70,6 +69,7 @@ def __init__( vectara_api_key: Optional[str] = None, use_core_api: bool = False, parallelize_ingest: bool = False, + x_source_str: str = "llama_index", **kwargs: Any, ) -> None: """Initialize the Vectara API.""" @@ -104,6 +104,9 @@ def __init__( else: _logger.debug(f"Using corpus id {self._vectara_corpus_id}") + # identifies usage source for internal measurement + self._x_source_str = x_source_str + # setup requests session with max 3 retries and 90s timeout # for calling Vectara API self._session = requests.Session() # to reuse connections @@ -149,7 +152,7 @@ def _get_post_headers(self) -> dict: "x-api-key": self._vectara_api_key, "customer-id": self._vectara_customer_id, "Content-Type": "application/json", - "X-Source": "llama_index", + "X-Source": self._x_source_str, } def _delete_doc(self, doc_id: str, corpus_id: Optional[str] = None) -> bool: diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/retriever.py b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/retriever.py index 2692c696caeb1..386c83098cba5 100644 --- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/retriever.py +++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/retriever.py @@ -35,6 +35,7 @@ MMR_RERANKER_ID = 272725718 SLINGSHOT_RERANKER_ID = 272725719 +UDF_RERANKER_ID = 272725722 class VectaraReranker(str, Enum): @@ -42,6 +43,7 @@ class VectaraReranker(str, Enum): MMR = "mmr" SLINGSHOT_ALT_NAME = "slingshot" SLINGSHOT = "multilingual_reranker_v1" + UDF = "udf" class VectaraRetriever(BaseRetriever): @@ -51,7 +53,7 @@ class VectaraRetriever(BaseRetriever): Args: index (VectaraIndex): the Vectara Index similarity_top_k (int): number of top k results to return, defaults to 5. - reranker (str): reranker to use: none, mmr or multilingual_reranker_v1. + reranker (str): reranker to use: none, mmr, multilingual_reranker_v1, or udf. Note that "multilingual_reranker_v1" is a Vectara Scale feature only. lambda_val (float): for hybrid search. 0 = neural search only. @@ -67,16 +69,23 @@ class VectaraRetriever(BaseRetriever): of diversity among the results with 0 corresponding to minimum diversity and 1 to maximum diversity. Defaults to 0.3. + udf_expression: the user defined expression for reranking results. + See (https://docs.vectara.com/docs/learn/user-defined-function-reranker) + for more details about syntax for udf reranker expressions. summary_enabled: whether to generate summaries or not. Defaults to False. summary_response_lang: language to use for summary generation. summary_num_results: number of results to use for summary generation. summary_prompt_name: name of the prompt to use for summary generation. - citations_url_pattern: URL pattern for citations. If non-empty, specifies - the URL pattern to use for citations; for example "{doc.url}". - see (https://docs.vectara.com/docs/api-reference/search-apis/search + citations_style: The style of the citations in the summary generation, + either "numeric", "html", "markdown", or "none". + This is a Vectara Scale only feature. Defaults to None. + citations_url_pattern: URL pattern for html and markdown citations. + If non-empty, specifies the URL pattern to use for citations; e.g. "{doc.url}". 
+ See (https://docs.vectara.com/docs/api-reference/search-apis/search #citation-format-in-summary) for more details. - If unspecified, citations are generated in numeric form [1],[2], etc This is a Vectara Scale only feature. Defaults to None. + citations_text_pattern: The displayed text for citations. + Must be specified for html and markdown citations. """ def __init__( @@ -90,12 +99,16 @@ def __init__( reranker: VectaraReranker = VectaraReranker.NONE, rerank_k: int = 50, mmr_diversity_bias: float = 0.3, + udf_expression: str = None, summary_enabled: bool = False, summary_response_lang: str = "eng", summary_num_results: int = 7, summary_prompt_name: str = "vectara-summary-ext-24-05-sml", + citations_style: Optional[str] = None, citations_url_pattern: Optional[str] = None, + citations_text_pattern: Optional[str] = None, callback_manager: Optional[CallbackManager] = None, + x_source_str: str = "llama_index", **kwargs: Any, ) -> None: """Initialize params.""" @@ -105,7 +118,10 @@ def __init__( self._n_sentences_before = n_sentences_before self._n_sentences_after = n_sentences_after self._filter = filter + self._citations_style = citations_style.upper() if citations_style else None self._citations_url_pattern = citations_url_pattern + self._citations_text_pattern = citations_text_pattern + self._x_source_str = x_source_str if reranker == VectaraReranker.MMR: self._rerank = True @@ -119,6 +135,11 @@ def __init__( self._rerank = True self._rerank_k = rerank_k self._reranker_id = SLINGSHOT_RERANKER_ID + elif reranker == VectaraReranker.UDF and udf_expression is not None: + self._rerank = True + self._rerank_k = rerank_k + self._udf_expression = udf_expression + self._reranker_id = UDF_RERANKER_ID else: self._rerank = False @@ -137,7 +158,7 @@ def _get_post_headers(self) -> dict: "x-api-key": self._index._vectara_api_key, "customer-id": self._index._vectara_customer_id, "Content-Type": "application/json", - "X-Source": "llama_index", + "X-Source": self._x_source_str, } @property @@ -206,6 +227,9 @@ def _build_vectara_query_body( reranking_config["mmrConfig"] = { "diversityBias": self._mmr_diversity_bias } + elif self._reranker_id == UDF_RERANKER_ID: + reranking_config["userFunction"] = self._udf_expression + data["query"][0]["rerankingConfig"] = reranking_config if self._summary_enabled: @@ -220,11 +244,18 @@ def _build_vectara_query_body( "store": True, "conversationId": chat_conv_id, } - if self._citations_url_pattern: - data["query"][0]["summary"][0]["citationParams"] = { - "style": "MARKDOWN", - "url_pattern": self._citations_url_pattern, - } + + if self._citations_style: + if self._citations_style in ["NUMERIC", "NONE"]: + data["query"][0]["summary"][0]["citationParams"] = { + "style": self._citations_style, + } + elif self._citations_url_pattern and self._citations_text_pattern: + data["query"][0]["summary"][0]["citationParams"] = { + "style": self._citations_style, + "urlPattern": self._citations_url_pattern, + "textPattern": self._citations_text_pattern, + } return data diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/pyproject.toml b/llama-index-integrations/indices/llama-index-indices-managed-vectara/pyproject.toml index 41251609aabbb..cd545bad34611 100644 --- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/pyproject.toml +++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/pyproject.toml @@ -25,13 +25,13 @@ ignore_missing_imports = true python_version = "3.8" [tool.poetry] -authors = ["Ofer Mendelevitch "] 
+authors = ["David Oplatka ", "Ofer Mendelevitch "] description = "llama-index managed vectara integration" exclude = ["**/BUILD"] license = "MIT" name = "llama-index-indices-managed-vectara" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/tests/test_indices_managed_vectara.py b/llama-index-integrations/indices/llama-index-indices-managed-vectara/tests/test_indices_managed_vectara.py index c9883c736e904..385a8bd067669 100644 --- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/tests/test_indices_managed_vectara.py +++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/tests/test_indices_managed_vectara.py @@ -3,14 +3,21 @@ from llama_index.core.indices.managed.base import BaseManagedIndex from llama_index.indices.managed.vectara import VectaraIndex import pytest +import re # # For this test to run properly, please setup as follows: # 1. Create a Vectara account: sign up at https://console.vectara.com/signup -# 2. Create a corpus in your Vectara account, with a "filter attribute" called "test_num". +# 2. Create a corpus in your Vectara account, with the following filter attributes: +# a. doc.test_num (text) +# b. doc.test_score (integer) +# c. doc.date (text) +# d. doc.url (text) # 3. Create an API_KEY for this corpus with permissions for query and indexing # 4. Setup environment variables: -# VECTARA_API_KEY, VECTARA_CORPUS_ID and VECTARA_CUSTOMER_ID +# VECTARA_API_KEY, VECTARA_CORPUS_ID, VECTARA_CUSTOMER_ID, and OPENAI_API_KEY +# +# Note: In order to run test_citations, you will need a Scale account. # @@ -23,19 +30,19 @@ def get_docs() -> List[Document]: inputs = [ { "text": "This is test text for Vectara integration with LlamaIndex", - "metadata": {"test_num": "1"}, + "metadata": {"test_num": "1", "test_score": 10, "date": "2020-02-25"}, }, { "text": "And now for something completely different", - "metadata": {"test_num": "2"}, + "metadata": {"test_num": "2", "test_score": 2, "date": "2015-10-13"}, }, { "text": "when 900 years you will be, look as good you will not", - "metadata": {"test_num": "3"}, + "metadata": {"test_num": "3", "test_score": 20, "date": "2023-09-12"}, }, { "text": "when 850 years you will be, look as good you will not", - "metadata": {"test_num": "4"}, + "metadata": {"test_num": "4", "test_score": 50, "date": "2022-01-01"}, }, ] docs: List[Document] = [] @@ -66,9 +73,9 @@ def vectara1(): def test_simple_retrieval(vectara1) -> None: docs = get_docs() qe = vectara1.as_retriever(similarity_top_k=1) - res = qe.retrieve("how will I look?") + res = qe.retrieve("Find me something different") assert len(res) == 1 - assert res[0].node.get_content() == docs[2].text + assert res[0].node.get_content() == docs[1].text def test_mmr_retrieval(vectara1) -> None: @@ -108,11 +115,43 @@ def test_retrieval_with_filter(vectara1) -> None: assert isinstance(vectara1, VectaraIndex) qe = vectara1.as_retriever(similarity_top_k=1, filter="doc.test_num = '1'") - res = qe.retrieve("how will I look?") + res = qe.retrieve("What does this test?") assert len(res) == 1 assert res[0].node.get_content() == docs[0].text +def test_udf_retrieval(vectara1) -> None: + docs = get_docs() + + # test with basic math expression + qe = vectara1.as_retriever( + similarity_top_k=2, + n_sentences_before=0, + n_sentences_after=0, + reranker="udf", + udf_expression="get('$.score') + get('$.document_metadata.test_score')", + ) + + res = 
qe.retrieve("What will the future look like?") + assert len(res) == 2 + assert res[0].node.get_content() == docs[3].text + assert res[1].node.get_content() == docs[2].text + + # test with dates: Weight of score subtracted by number of years from current date + qe = vectara1.as_retriever( + similarity_top_k=2, + n_sentences_before=0, + n_sentences_after=0, + reranker="udf", + udf_expression="max(0, 5 * get('$.score') - (to_unix_timestamp(now()) - to_unix_timestamp(datetime_parse(get('$.document_metadata.date'), 'yyyy-MM-dd'))) / 31536000)", + ) + + res = qe.retrieve("What will the future look like?") + assert res[0].node.get_content() == docs[2].text + assert res[1].node.get_content() == docs[3].text + assert len(res) == 2 + + @pytest.fixture() def vectara2(): try: @@ -160,9 +199,35 @@ def test_file_upload(vectara2) -> None: assert "paul graham" in str(res).lower() and "software" in str(res).lower() # test query with Vectara summarization (default) + query_engine = vectara2.as_query_engine(similarity_top_k=3) + res = query_engine.query("How is Paul related to Reddit?") + summary = res.response + assert "paul graham" in summary.lower() and "reddit" in summary.lower() + assert "https://www.paulgraham.com/worked.html" in str(res.source_nodes) + + +def test_citations(vectara2) -> None: + # test markdown citations query_engine = vectara2.as_query_engine( - similarity_top_k=3, citations_url_pattern="{doc.url}" + similarity_top_k=10, + summary_num_results=7, + summary_prompt_name="vectara-summary-ext-24-05-med-omni", + citations_style="markdown", + citations_url_pattern="{doc.url}", + citations_text_pattern="(source)", ) - res = query_engine.query("How is Paul related to Reddit?") - assert "paul graham" in str(res).lower() and "reddit" in str(res).lower() - assert "https://www.paulgraham.com/worked.html" in str(res).lower() + res = query_engine.query("Describe Paul's early life and career.") + summary = res.response + assert "(source)" in summary + assert "https://www.paulgraham.com/worked.html" in summary + + # test numeric citations + query_engine = vectara2.as_query_engine( + similarity_top_k=10, + summary_num_results=7, + summary_prompt_name="mockingbird-1.0-2024-07-16", + citations_style="numeric", + ) + res = query_engine.query("Describe Paul's early life and career.") + summary = res.response + assert re.search(r"\[\d+\]", summary) diff --git a/llama-index-integrations/llms/llama-index-llms-ai21/README.md b/llama-index-integrations/llms/llama-index-llms-ai21/README.md index de3c66dd6bd8a..ada0c68ead155 100644 --- a/llama-index-integrations/llms/llama-index-llms-ai21/README.md +++ b/llama-index-integrations/llms/llama-index-llms-ai21/README.md @@ -20,7 +20,7 @@ You need to initialize the AI21 client with the appropriate model and API key. 
from llama_index.llms.ai21 import AI21 api_key = "your_api_key" -llm = AI21(model="jamba-instruct", api_key=api_key) +llm = AI21(model="jamba-1.5-mini", api_key=api_key) ``` ### Chat Completions ```python from llama_index.llms.ai21 import AI21 from llama_index.core.base.llms.types import ChatMessage api_key = "your_api_key" -llm = AI21(model="jamba-instruct", api_key=api_key) +llm = AI21(model="jamba-1.5-mini", api_key=api_key) messages = [ChatMessage(role="user", content="What is the meaning of life?")] response = llm.chat(messages) @@ -44,7 +44,7 @@ from llama_index.llms.ai21 import AI21 from llama_index.core.base.llms.types import ChatMessage api_key = "your_api_key" -llm = AI21(model="jamba-instruct", api_key=api_key) +llm = AI21(model="jamba-1.5-mini", api_key=api_key) messages = [ChatMessage(role="user", content="What is the meaning of life?")] @@ -58,7 +58,7 @@ for chunk in llm.stream_chat(messages): from llama_index.llms.ai21 import AI21 api_key = "your_api_key" -llm = AI21(model="jamba-instruct", api_key=api_key) +llm = AI21(model="jamba-1.5-mini", api_key=api_key) response = llm.complete(prompt="What is the meaning of life?") print(response.text) @@ -70,7 +70,7 @@ print(response.text) from llama_index.llms.ai21 import AI21 api_key = "your_api_key" -llm = AI21(model="jamba-instruct", api_key=api_key) +llm = AI21(model="jamba-1.5-mini", api_key=api_key) response = llm.stream_complete(prompt="What is the meaning of life?") @@ -118,7 +118,7 @@ The type of the tokenizer is determined by the name of the model from llama_index.llms.ai21 import AI21 api_key = "your_api_key" -llm = AI21(model="jamba-instruct", api_key=api_key) +llm = AI21(model="jamba-1.5-mini", api_key=api_key) tokenizer = llm.tokenizer tokens = tokenizer.encode("What is the meaning of life?") @@ -141,7 +141,7 @@ from llama_index.core.base.llms.types import ChatMessage async def main(): api_key = "your_api_key" - llm = AI21(model="jamba-instruct", api_key=api_key) + llm = AI21(model="jamba-1.5-mini", api_key=api_key) messages = [ ChatMessage(role="user", content="What is the meaning of life?") @@ -159,7 +159,7 @@ from llama_index.core.base.llms.types import ChatMessage async def main(): api_key = "your_api_key" - llm = AI21(model="jamba-instruct", api_key=api_key) + llm = AI21(model="jamba-1.5-mini", api_key=api_key) messages = [ ChatMessage(role="user", content="What is the meaning of life?") @@ -169,3 +169,54 @@ async def main(): async for chunk in response: print(chunk.message.content) ``` + +## Tool Calling + +```python +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.llms.ai21 import AI21 +from llama_index.core.tools import FunctionTool + + +def multiply(a: int, b: int) -> int: + """Multiply two integers and return the result integer""" + return a * b + + +def subtract(a: int, b: int) -> int: + """Subtract two integers and return the result integer""" + return a - b + + +def divide(a: int, b: int) -> float: + """Divide two integers and return the result float""" + return a / b + + +def add(a: int, b: int) -> int: + """Add two integers and return the result integer""" + return a + b + + +multiply_tool = FunctionTool.from_defaults(fn=multiply) +add_tool = FunctionTool.from_defaults(fn=add) +subtract_tool = FunctionTool.from_defaults(fn=subtract) +divide_tool = FunctionTool.from_defaults(fn=divide) + +api_key = "your_api_key" + +llm = AI21(model="jamba-1.5-mini", api_key=api_key) + +agent_worker = FunctionCallingAgentWorker.from_tools( + [multiply_tool, add_tool, 
subtract_tool, divide_tool], + llm=llm, + verbose=True, + allow_parallel_tool_calls=True, +) +agent = agent_worker.as_agent() + +response = agent.chat( + "My friend Moses had 10 apples. He ate 5 apples in the morning. Then he found a box with 25 apples." + "He divided all his apples between his 5 friends. How many apples did each friend get?" +) +``` diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/.gitignore b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/BUILD b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/Makefile b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/README.md b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/README.md new file mode 100644 index 0000000000000..7d699eacf0641 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/README.md @@ -0,0 +1,25 @@ +# LlamaIndex Llms Integration: Alibabacloud_Aisearch + +## Installation + +``` +pip install llama-index-llms-alibabacloud-aisearch +``` + +## Usage + +For further details, please visit [text-generation-api-details](https://help.aliyun.com/zh/open-search/search-platform/developer-reference/text-generation-api-details). + +You can specify the `endpoint` and `aisearch_api_key` in the constructor, or set the environment variables `AISEARCH_ENDPOINT` and `AISEARCH_API_KEY`. 
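+
+If you prefer to pass these values explicitly rather than through the environment, a minimal sketch (the endpoint and key values below are placeholders, not real credentials):
+
+```python
+from llama_index.llms.alibabacloud_aisearch import AlibabaCloudAISearchLLM
+
+# Placeholder values; substitute your own endpoint and API key.
+llm = AlibabaCloudAISearchLLM(
+    endpoint="your-endpoint",
+    aisearch_api_key="your-api-key",
+    service_id="ops-qwen-turbo",
+)
+```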
+ +```python +from llama_index.llms.alibabacloud_aisearch import AlibabaCloudAISearchLLM +from llama_index.core import ChatPromptTemplate + +llm = AlibabaCloudAISearchLLM(service_id="ops-qwen-turbo") +prompt_template = ChatPromptTemplate.from_messages( + [("system", "You are a helpful assistant."), ("user", "{prompt}")] +) +messages = prompt_template.format_messages(prompt="") +print(llm.chat(messages)) +``` diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/BUILD b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/__init__.py b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/__init__.py new file mode 100644 index 0000000000000..2df34a4bbcc87 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/__init__.py @@ -0,0 +1,4 @@ +from llama_index.llms.alibabacloud_aisearch.base import AlibabaCloudAISearchLLM + + +__all__ = ["AlibabaCloudAISearchLLM"] diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/base.py b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/base.py new file mode 100644 index 0000000000000..185a54199b88d --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/llama_index/llms/alibabacloud_aisearch/base.py @@ -0,0 +1,226 @@ +import asyncio +import time +from typing import Dict, Any, List, Sequence + +from llama_index.core.llms import ( + CustomLLM, + CompletionResponse, + LLMMetadata, +) +from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback +from llama_index.core.bridge.pydantic import Field, PrivateAttr +from llama_index.core.base.llms.types import ( + ChatMessage, + ChatResponse, + MessageRole, +) +from llama_index.core.base.llms.generic_utils import get_from_param_or_env + +try: + from alibabacloud_searchplat20240529.models import ( + GetTextGenerationRequest, + GetTextGenerationResponse, + GetTextGenerationRequestMessages, + ) + from alibabacloud_tea_openapi.models import Config as AISearchConfig + from alibabacloud_searchplat20240529.client import Client + from alibabacloud_tea_util.models import RuntimeOptions + from Tea.exceptions import TeaException +except ImportError: + raise ImportError( + "Could not import alibabacloud_searchplat20240529 python package. " + "Please install it with `pip install alibabacloud-searchplat20240529`." 
+ ) + + +def retry_decorator(func, wait_seconds: int = 1): + def wrap(*args, **kwargs): + while True: + try: + return func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + time.sleep(wait_seconds) + else: + raise + + return wrap + + +def aretry_decorator(func, wait_seconds: int = 1): + async def wrap(*args, **kwargs): + while True: + try: + return await func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + await asyncio.sleep(wait_seconds) + else: + raise + + return wrap + + +class AlibabaCloudAISearchLLM(CustomLLM): + """ + For further details, please visit `https://help.aliyun.com/zh/open-search/search-platform/developer-reference/text-generation-api-details`. + """ + + _client: Client = PrivateAttr() + _options: RuntimeOptions = PrivateAttr() + + aisearch_api_key: str = Field(default=None, exclude=True) + endpoint: str = None + + service_id: str = "ops-qwen-turbo" + workspace_name: str = "default" + + temperature: float = 0.5 + top_k: float = 1 + additional_kwargs: Dict[str, Any] = Field(default_factory=dict) + + read_timeout: int = 60000 + connection_timeout: int = 5000 + csi_level: str = "strict" + + def __init__( + self, endpoint: str = None, aisearch_api_key: str = None, **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.aisearch_api_key = get_from_param_or_env( + "aisearch_api_key", aisearch_api_key, "AISEARCH_API_KEY" + ) + self.endpoint = get_from_param_or_env("endpoint", endpoint, "AISEARCH_ENDPOINT") + + config = AISearchConfig( + bearer_token=self.aisearch_api_key, + endpoint=self.endpoint, + protocol="http", + ) + + self._client = Client(config=config) + + self._options = RuntimeOptions( + read_timeout=self.read_timeout, connect_timeout=self.connection_timeout + ) + + @property + def metadata(self) -> LLMMetadata: + """Get LLM metadata.""" + return LLMMetadata(model_name=self.service_id, is_chat_model=True) + + @property + def _default_params(self) -> Dict[str, Any]: + return { + "temperature": self.temperature, + "top_k": self.top_k, + **self.additional_kwargs, + } + + @staticmethod + def _convert_chat_messages( + messages: Sequence[ChatMessage], + ) -> List[GetTextGenerationRequestMessages]: + results = [] + for message in messages: + message = GetTextGenerationRequestMessages( + content=message.content, role=message.role + ) + results.append(message) + return results + + @retry_decorator + def _get_text_generation( + self, messages: List[GetTextGenerationRequestMessages], **kwargs: Any + ) -> GetTextGenerationResponse: + parameters: Dict[str, Any] = self._default_params + parameters.update(kwargs) + request = GetTextGenerationRequest( + csi_level=self.csi_level, messages=messages, parameters=parameters + ) + + response: GetTextGenerationResponse = ( + self._client.get_text_generation_with_options( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + headers={}, + runtime=self._options, + ) + ) + return response + + @aretry_decorator + async def _aget_text_generation( + self, messages: List[GetTextGenerationRequestMessages], **kwargs: Any + ) -> GetTextGenerationResponse: + parameters: Dict[str, Any] = self._default_params + parameters.update(kwargs) + request = GetTextGenerationRequest( + csi_level=self.csi_level, messages=messages, parameters=parameters + ) + + response: GetTextGenerationResponse = ( + await self._client.get_text_generation_with_options_async( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + 
headers={}, + runtime=self._options, + ) + ) + + return response + + @llm_completion_callback() + def complete(self, prompt: str, **kwargs: Any) -> CompletionResponse: + messages = [ + GetTextGenerationRequestMessages(content=prompt, role=MessageRole.USER) + ] + response: GetTextGenerationResponse = self._get_text_generation( + messages, **kwargs + ) + text = response.body.result.text + return CompletionResponse(text=text, raw=response) + + def stream_complete(self, messages: Any, **kwargs: Any) -> CompletionResponse: + raise NotImplementedError + + @llm_completion_callback() + async def acomplete(self, prompt: str, **kwargs: Any) -> CompletionResponse: + messages = [ + GetTextGenerationRequestMessages(content=prompt, role=MessageRole.USER) + ] + response: GetTextGenerationResponse = await self._aget_text_generation( + messages, **kwargs + ) + text = response.body.result.text + return CompletionResponse(text=text, raw=response) + + @llm_chat_callback() + def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: + messages = self._convert_chat_messages(messages) + response: GetTextGenerationResponse = self._get_text_generation( + messages, **kwargs + ) + text = response.body.result.text + return ChatResponse( + message=ChatMessage(role=MessageRole.ASSISTANT, content=text), raw=response + ) + + @llm_chat_callback() + async def achat( + self, messages: Sequence[ChatMessage], **kwargs: Any + ) -> ChatResponse: + messages = self._convert_chat_messages(messages) + response: GetTextGenerationResponse = await self._aget_text_generation( + messages, **kwargs + ) + text = response.body.result.text + return ChatResponse( + message=ChatMessage(role=MessageRole.ASSISTANT, content=text), raw=response + ) + + @classmethod + def class_name(cls) -> str: + return "AlibabaCloudAISearchLLM" diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/pyproject.toml new file mode 100644 index 0000000000000..1273116989a57 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.llms.alibabacloud_aisearch" + +[tool.llamahub.class_authors] +AlibabaCloudAISearchLLM = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["llama-index"] +description = "llama-index llms alibabacloud_aisearch integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-llms-alibabacloud-aisearch" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +alibabacloud-searchplat20240529 = "^1.1.0" +llama-index-core = "^0.11.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + 
+[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/BUILD b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/__init__.py b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/test_llms_alibabacloud_aisearch.py b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/test_llms_alibabacloud_aisearch.py new file mode 100644 index 0000000000000..c6180ad73edf0 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-alibabacloud-aisearch/tests/test_llms_alibabacloud_aisearch.py @@ -0,0 +1,7 @@ +from llama_index.core.base.llms.base import BaseLLM +from llama_index.llms.alibabacloud_aisearch import AlibabaCloudAISearchLLM + + +def test_class(): + names_of_base_classes = [b.__name__ for b in AlibabaCloudAISearchLLM.__mro__] + assert BaseLLM.__name__ in names_of_base_classes diff --git a/llama-index-integrations/llms/llama-index-llms-azure-inference/llama_index/llms/azure_inference/base.py b/llama-index-integrations/llms/llama-index-llms-azure-inference/llama_index/llms/azure_inference/base.py index 9f8f4d4ed6981..bbdd3ec918417 100644 --- a/llama-index-integrations/llms/llama-index-llms-azure-inference/llama_index/llms/azure_inference/base.py +++ b/llama-index-integrations/llms/llama-index-llms-azure-inference/llama_index/llms/azure_inference/base.py @@ -1,6 +1,7 @@ """Azure AI model inference chat completions client.""" import json +import logging from typing import ( Any, Callable, @@ -24,7 +25,7 @@ LLMMetadata, MessageRole, ) -from llama_index.core.bridge.pydantic import Field, PrivateAttr, BaseModel +from llama_index.core.bridge.pydantic import Field, PrivateAttr, BaseModel, ConfigDict from llama_index.core.callbacks import CallbackManager from llama_index.core.constants import DEFAULT_TEMPERATURE from llama_index.core.llms.callbacks import ( @@ -51,12 +52,15 @@ from azure.core.credentials import TokenCredential from azure.core.credentials import AzureKeyCredential +from azure.core.exceptions import HttpResponseError from azure.ai.inference.models import ( ChatCompletionsToolCall, ChatRequestMessage, ChatResponseMessage, ) +logger = logging.getLogger(__name__) + def to_inference_message( messages: Sequence[ChatMessage], @@ -175,6 +179,7 @@ class AzureAICompletionsModel(FunctionCallingLLM): ``` """ + model_config = ConfigDict(protected_namespaces=()) model_name: Optional[str] = Field( default=None, description="The model id to use. 
Optional for endpoints running a single model.", @@ -182,8 +187,8 @@ class AzureAICompletionsModel(FunctionCallingLLM): temperature: float = Field( default=DEFAULT_TEMPERATURE, description="The temperature to use for sampling.", - gte=0.0, - lte=1.0, + ge=0.0, + le=1.0, ) max_tokens: Optional[int] = Field( default=None, @@ -192,7 +197,8 @@ class AzureAICompletionsModel(FunctionCallingLLM): ) seed: str = Field(default=None, description="The random seed to use for sampling.") model_kwargs: Dict[str, Any] = Field( - default_factory=dict, description="Additional kwargs model parameters." + default_factory=dict, + description="Additional kwargs model parameters.", ) _client: ChatCompletionsClient = PrivateAttr() @@ -279,11 +285,25 @@ def class_name(cls) -> str: @property def metadata(self) -> LLMMetadata: if not self._model_name: - model_info = self._client.get_model_info() - if model_info: - self._model_name = model_info.get("model_name", None) - self._model_type = model_info.get("model_type", None) - self._model_provider = model_info.get("model_provider_name", None) + model_info = None + try: + # Get model info from the endpoint. This method may not be supported by all + # endpoints. + model_info = self._client.get_model_info() + except HttpResponseError: + logger.warning( + f"Endpoint '{self._client._config.endpoint}' does not support model metadata retrieval. " + "Failed to get model info for method `metadata()`." + ) + finally: + if model_info: + self._model_name = model_info.get("model_name", None) + self._model_type = model_info.get("model_type", None) + self._model_provider = model_info.get("model_provider_name", None) + else: + self._model_name = self.model_name or "unknown" + self._model_type = "unknown" + self._model_provider = "unknown" return LLMMetadata( is_chat_model=self._model_type == "chat-completions", diff --git a/llama-index-integrations/llms/llama-index-llms-azure-inference/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-azure-inference/pyproject.toml index 50d84e85c7061..8849f78791f62 100644 --- a/llama-index-integrations/llms/llama-index-llms-azure-inference/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-azure-inference/pyproject.toml @@ -28,7 +28,7 @@ license = "MIT" name = "llama-index-llms-azure-inference" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.1" +version = "0.2.3" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/llms/llama-index-llms-azure-inference/tests/test_llms_azure_inference.py b/llama-index-integrations/llms/llama-index-llms-azure-inference/tests/test_llms_azure_inference.py index 3ffd23de89c77..9cd542c763c32 100644 --- a/llama-index-integrations/llms/llama-index-llms-azure-inference/tests/test_llms_azure_inference.py +++ b/llama-index-integrations/llms/llama-index-llms-azure-inference/tests/test_llms_azure_inference.py @@ -1,3 +1,4 @@ +import logging import os import pytest import json @@ -5,6 +6,8 @@ from llama_index.core.llms import ChatMessage, MessageRole from llama_index.core.tools import FunctionTool +logger = logging.getLogger(__name__) + @pytest.mark.skipif( not { @@ -118,3 +121,29 @@ def echo(message: str) -> str: response.message.additional_kwargs["tool_calls"][0]["function"]["name"] == "echo" ) + + +@pytest.mark.skipif( + not { + "AZURE_INFERENCE_ENDPOINT", + "AZURE_INFERENCE_CREDENTIAL", + }.issubset(set(os.environ)), + reason="Azure AI endpoint and/or credential are not set.", +) +def test_get_metadata(caplog): + """Tests if we can 
get model metadata back from the endpoint. If so, + model_name should not be 'unknown'. Some endpoints may not support this + and in those cases a warning should be logged. + """ + # In case the endpoint being tested serves more than one model + model_name = os.environ.get("AZURE_INFERENCE_MODEL", None) + + llm = AzureAICompletionsModel(model_name=model_name) + + response = llm.metadata + + assert ( + response.model_name != "unknown" + or "does not support model metadata retrieval" in caplog.text + ) + assert not model_name or response.model_name == model_name diff --git a/llama-index-integrations/llms/llama-index-llms-azure-openai/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-azure-openai/pyproject.toml index d82b0f3808140..10304ae111170 100644 --- a/llama-index-integrations/llms/llama-index-llms-azure-openai/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-azure-openai/pyproject.toml @@ -29,11 +29,11 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-azure-openai" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -llama-index-llms-openai = "^0.2.0" +llama-index-llms-openai = "^0.2.1" azure-identity = "^1.15.0" httpx = "*" llama-index-core = "^0.11.0" diff --git a/llama-index-integrations/llms/llama-index-llms-bedrock-converse/llama_index/llms/bedrock_converse/base.py b/llama-index-integrations/llms/llama-index-llms-bedrock-converse/llama_index/llms/bedrock_converse/base.py index e496b2bb466bc..23c9c03d772af 100644 --- a/llama-index-integrations/llms/llama-index-llms-bedrock-converse/llama_index/llms/bedrock_converse/base.py +++ b/llama-index-integrations/llms/llama-index-llms-bedrock-converse/llama_index/llms/bedrock_converse/base.py @@ -467,7 +467,7 @@ async def astream_chat( async def gen() -> ChatResponseAsyncGen: content = {} role = MessageRole.ASSISTANT - for chunk in response["stream"]: + async for chunk in response["stream"]: if content_block_delta := chunk.get("contentBlockDelta"): content_delta = content_block_delta["delta"] content = join_two_dicts(content, content_delta) diff --git a/llama-index-integrations/llms/llama-index-llms-bedrock-converse/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-bedrock-converse/pyproject.toml index e1390ea9a8bfa..0aac7aa1e4312 100644 --- a/llama-index-integrations/llms/llama-index-llms-bedrock-converse/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-bedrock-converse/pyproject.toml @@ -27,11 +27,10 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-bedrock-converse" readme = "README.md" -version = "0.2.2" +version = "0.3.0" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -llama-index-llms-anthropic = "^0.2.0" boto3 = "^1.34.122" aioboto3 = "^13.1.1" llama-index-core = "^0.11.0" diff --git a/llama-index-integrations/llms/llama-index-llms-bedrock/llama_index/llms/bedrock/base.py b/llama-index-integrations/llms/llama-index-llms-bedrock/llama_index/llms/bedrock/base.py index 9b001fa79475a..55732f8598bdb 100644 --- a/llama-index-integrations/llms/llama-index-llms-bedrock/llama_index/llms/bedrock/base.py +++ b/llama-index-integrations/llms/llama-index-llms-bedrock/llama_index/llms/bedrock/base.py @@ -189,9 +189,15 @@ def __init__( output_parser=output_parser, ) self._provider = get_provider(model) - messages_to_prompt = messages_to_prompt or self._provider.messages_to_prompt - completion_to_prompt = ( - completion_to_prompt or self._provider.completion_to_prompt + 
self.messages_to_prompt = ( + messages_to_prompt + or self._provider.messages_to_prompt + or self.messages_to_prompt + ) + self.completion_to_prompt = ( + completion_to_prompt + or self._provider.completion_to_prompt + or self.completion_to_prompt ) # Prior to general availability, custom boto3 wheel files were # distributed that used the bedrock service to invokeModel. diff --git a/llama-index-integrations/llms/llama-index-llms-bedrock/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-bedrock/pyproject.toml index e4d8d27bcea03..d2efb18d4b249 100644 --- a/llama-index-integrations/llms/llama-index-llms-bedrock/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-bedrock/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-bedrock" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/llms/llama-index-llms-huggingface/llama_index/llms/huggingface/base.py b/llama-index-integrations/llms/llama-index-llms-huggingface/llama_index/llms/huggingface/base.py index 4a80b09e1cc3f..94503c0aa4f5a 100644 --- a/llama-index-integrations/llms/llama-index-llms-huggingface/llama_index/llms/huggingface/base.py +++ b/llama-index-integrations/llms/llama-index-llms-huggingface/llama_index/llms/huggingface/base.py @@ -261,7 +261,7 @@ def __init__( if tokenizer.name_or_path != model_name: logger.warning( - f"The model `{model_name}` and tokenizer `{self._tokenizer.name_or_path}` " + f"The model `{model_name}` and tokenizer `{tokenizer.name_or_path}` " f"are different, please ensure that they are compatible." ) diff --git a/llama-index-integrations/llms/llama-index-llms-huggingface/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-huggingface/pyproject.toml index ac0b3d7f6443a..32995db7872b1 100644 --- a/llama-index-integrations/llms/llama-index-llms-huggingface/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-huggingface/pyproject.toml @@ -28,7 +28,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-huggingface" readme = "README.md" -version = "0.3.1" +version = "0.3.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py b/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py index 078cbed49551e..c2a123b11dc1c 100644 --- a/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py +++ b/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py @@ -48,7 +48,7 @@ if TYPE_CHECKING: from llama_index.core.tools.types import BaseTool -DEFAULT_MISTRALAI_MODEL = "mistral-tiny" +DEFAULT_MISTRALAI_MODEL = "mistral-large-latest" DEFAULT_MISTRALAI_ENDPOINT = "https://api.mistral.ai" DEFAULT_MISTRALAI_MAX_TOKENS = 512 @@ -123,7 +123,7 @@ class MistralAI(FunctionCallingLLM): max_retries: int = Field( default=5, description="The maximum number of API retries.", gte=0 ) - random_seed: str = Field( + random_seed: Optional[int] = Field( default=None, description="The random seed to use for sampling." 
) additional_kwargs: Dict[str, Any] = Field( diff --git a/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml index 37d46858af3db..1296975f0f094 100644 --- a/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-mistralai" readme = "README.md" -version = "0.2.1" +version = "0.2.3" [tool.poetry.dependencies] python = ">=3.9,<4.0" diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/base.py b/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/base.py index f1b14ec8681c0..f00f69f765670 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/base.py +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/base.py @@ -1,21 +1,33 @@ -from typing import ( - Any, - Optional, - List, - Literal, -) +from typing import Any, Optional, List, Literal, Union, Dict, TYPE_CHECKING from deprecated import deprecated import warnings +import json from llama_index.core.bridge.pydantic import PrivateAttr, BaseModel from llama_index.core.base.llms.generic_utils import ( get_from_param_or_env, ) +from llama_index.llms.nvidia.utils import ( + is_nvidia_function_calling_model, + is_chat_model, +) from llama_index.llms.openai_like import OpenAILike +from llama_index.core.llms.function_calling import FunctionCallingLLM from urllib.parse import urlparse, urlunparse +from llama_index.core.base.llms.types import ( + ChatMessage, + ChatResponse, + MessageRole, +) + +from llama_index.core.llms.llm import ToolSelection + +if TYPE_CHECKING: + from llama_index.core.tools.types import BaseTool + DEFAULT_MODEL = "meta/llama3-8b-instruct" BASE_URL = "https://integrate.api.nvidia.com/v1/" @@ -25,11 +37,20 @@ ] +def force_single_tool_call(response: ChatResponse) -> None: + tool_calls = response.message.additional_kwargs.get("tool_calls", []) + if len(tool_calls) > 1: + response.message.additional_kwargs["tool_calls"] = [tool_calls[0]] + + class Model(BaseModel): id: str + base_model: Optional[str] + is_function_calling_model: Optional[bool] = False + is_chat_model: Optional[bool] = False -class NVIDIA(OpenAILike): +class NVIDIA(OpenAILike, FunctionCallingLLM): """NVIDIA's API Catalog Connector.""" _is_hosted: bool = PrivateAttr(True) @@ -37,7 +58,7 @@ class NVIDIA(OpenAILike): def __init__( self, - model: str = DEFAULT_MODEL, + model: Optional[str] = None, nvidia_api_key: Optional[str] = None, api_key: Optional[str] = None, base_url: Optional[str] = BASE_URL, @@ -81,15 +102,45 @@ def __init__( ) super().__init__( - model=model, api_key=api_key, api_base=base_url, max_tokens=max_tokens, - is_chat_model=True, + is_chat_model=is_chat_model(model), default_headers={"User-Agent": "llama-index-llms-nvidia"}, + is_function_calling_model=is_nvidia_function_calling_model(model), **kwargs, ) - self._is_hosted = is_hosted + self.model = model + self._is_hosted = base_url in KNOWN_URLS + + if self._is_hosted and api_key == "NO_API_KEY_PROVIDED": + warnings.warn( + "An API key is required for the hosted NIM. 
This will become an error in 0.2.0.", + ) + + if not model: + self.__get_default_model() + + def __get_default_model(self): + """Set default model.""" + if not self._is_hosted: + valid_models = [ + model.id + for model in self.available_models + if not model.base_model or model.base_model == model.id + ] + self.model = next(iter(valid_models), None) + if self.model: + warnings.warn( + f"Default model is set as: {self.model}. \n" + "Set model using model parameter. \n" + "To get available models use available_models property.", + UserWarning, + ) + else: + raise ValueError("No locally hosted model was found.") + else: + self.model = DEFAULT_MODEL def _validate_url(self, base_url): """ @@ -101,9 +152,7 @@ def _validate_url(self, base_url): expected_format = "Expected format is 'http://host:port'." result = urlparse(base_url) if not (result.scheme and result.netloc): - raise ValueError( - f"Invalid base_url, Expected format is 'http://host:port': {base_url}" - ) + raise ValueError(f"Invalid base_url, {expected_format}") if result.path: normalized_path = result.path.strip("/") if normalized_path == "v1": @@ -111,12 +160,20 @@ def _validate_url(self, base_url): elif normalized_path == "v1/chat/completions": warnings.warn(f"{expected_format} Rest is Ignored.") else: - raise ValueError(f"Base URL path is not recognized. {expected_format}") + raise ValueError(f"Invalid base_url, {expected_format}") return urlunparse((result.scheme, result.netloc, "v1", "", "", "")) @property def available_models(self) -> List[Model]: - models = self._get_client().models.list().data + models = [ + Model( + id=model.id, + base_model=getattr(model, "params", {}).get("root", None), + is_function_calling_model=is_nvidia_function_calling_model(model.id), + is_chat_model=is_chat_model(model.id), + ) + for model in self._get_client().models.list().data + ] # only exclude models in hosted mode. in non-hosted mode, the administrator has control # over the model name and may deploy an excluded name that will work. 
if self._is_hosted: @@ -165,3 +222,80 @@ def mode( self.api_key = api_key return self + + @property + def _is_chat_model(self) -> bool: + return is_chat_model(self.model) + + def _prepare_chat_with_tools( + self, + tools: List["BaseTool"], + user_msg: Optional[Union[str, ChatMessage]] = None, + chat_history: Optional[List[ChatMessage]] = None, + verbose: bool = False, + allow_parallel_tool_calls: bool = False, + **kwargs: Any, + ) -> Dict[str, Any]: + """Prepare the chat with tools.""" + # the nvidia api uses the same openai tool format + tool_specs = [ + tool.metadata.to_openai_tool(skip_length_check=True) for tool in tools + ] + + if isinstance(user_msg, str): + user_msg = ChatMessage(role=MessageRole.USER, content=user_msg) + + messages = chat_history or [] + if user_msg: + messages.append(user_msg) + + return { + "messages": messages, + "tools": tool_specs or None, + **kwargs, + } + + def _validate_chat_with_tools_response( + self, + response: ChatResponse, + tools: List["BaseTool"], + allow_parallel_tool_calls: bool = False, + **kwargs: Any, + ) -> ChatResponse: + """Validate the response from chat_with_tools.""" + if not allow_parallel_tool_calls: + force_single_tool_call(response) + return response + + def get_tool_calls_from_response( + self, + response: "ChatResponse", + error_on_no_tool_call: bool = True, + ) -> List[ToolSelection]: + """Extract tool calls from the chat response.""" + tool_calls = response.message.additional_kwargs.get("tool_calls", []) + + if len(tool_calls) < 1: + if error_on_no_tool_call: + raise ValueError( + f"Expected at least one tool call, but got {len(tool_calls)} tool calls." + ) + else: + return [] + + tool_selections = [] + for tool_call in tool_calls: + # if not isinstance(tool_call, ToolCall): + # raise ValueError("Invalid tool_call object") + + argument_dict = json.loads(tool_call.function.arguments) + + tool_selections.append( + ToolSelection( + tool_id=tool_call.id, + tool_name=tool_call.function.name, + tool_kwargs=argument_dict, + ) + ) + + return tool_selections diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/utils.py b/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/utils.py index 9c6af24afc047..fc51a928e5736 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/utils.py +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/llama_index/llms/nvidia/utils.py @@ -32,6 +32,28 @@ "upstage/solar-10.7b-instruct": 4096, } +NVIDIA_FUNTION_CALLING_MODELS = ( + "nv-mistralai/mistral-nemo-12b-instruct", + "meta/llama-3.1-8b-instruct", + "meta/llama-3.1-70b-instruct", + "meta/llama-3.1-405b-instruct", + "mistralai/mistral-large-2-instruct", +) + +COMPLETION_MODELS = ( + "bigcode/starcoder2-7b", + "bigcode/starcoder2-15b", + "nvidia/mistral-nemo-minitron-8b-base", +) + + +def is_chat_model(modelname: str): + return modelname not in COMPLETION_MODELS + + +def is_nvidia_function_calling_model(modelname: str) -> bool: + return modelname in NVIDIA_FUNTION_CALLING_MODELS + def catalog_modelname_to_contextsize(modelname: str) -> Optional[int]: return API_CATALOG_MODELS.get(modelname, None) diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-nvidia/pyproject.toml index 5b816048d3305..44f2ab87598a8 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/pyproject.toml @@ -30,7 +30,7 @@ license = "MIT" name =
"llama-index-llms-nvidia" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.0" +version = "0.2.4" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" @@ -47,7 +47,9 @@ mypy = "0.991" pre-commit = "3.2.0" pylint = "2.15.10" pytest = "7.2.1" +pytest-asyncio = "^0.23.0" pytest-mock = "3.11.1" +respx = "^0.21.1" ruff = "0.0.292" tree-sitter-languages = "^1.8.0" types-Deprecated = ">=0.1.0" @@ -58,6 +60,7 @@ types-requests = "2.28.11.8" # TODO: unpin when mypy>0.991 types-setuptools = "67.1.0.0" [tool.poetry.group.test_integration.dependencies] +pytest-httpx = "*" requests-mock = "^1.12.1" [tool.pytest.ini_options] diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_api_key.py b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_api_key.py index e040b2c4975c3..8e59eaa58e9f4 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_api_key.py +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_api_key.py @@ -5,6 +5,29 @@ from llama_index.llms.nvidia import NVIDIA from typing import Any +from pytest_httpx import HTTPXMock + + +@pytest.fixture() +def mock_local_models(httpx_mock: HTTPXMock): + mock_response = { + "data": [ + { + "id": "model1", + "object": "model", + "created": 1234567890, + "owned_by": "OWNER", + "root": "model1", + } + ] + } + + httpx_mock.add_response( + url="https://test_url/v1/models", + method="GET", + json=mock_response, + status_code=200, + ) def get_api_key(instance: Any) -> str: @@ -16,6 +39,7 @@ def test_create_default_url_without_api_key(masked_env_var: str) -> None: NVIDIA() +@pytest.mark.usefixtures("mock_local_models") def test_create_unknown_url_without_api_key(masked_env_var: str) -> None: NVIDIA(base_url="https://test_url/v1") diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_base_url.py b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_base_url.py index 9e56da4a257a0..48682720b9bda 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_base_url.py +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_base_url.py @@ -1,27 +1,30 @@ -from urllib.parse import urlparse, urlunparse - import pytest -from requests_mock import Mocker from llama_index.llms.nvidia import NVIDIA as Interface +from pytest_httpx import HTTPXMock @pytest.fixture() -def mock_v1_local_models2(requests_mock: Mocker, base_url: str) -> None: - result = urlparse(base_url) - base_url = urlunparse((result.scheme, result.netloc, "v1", "", "", "")) - requests_mock.get( - f"{base_url}/models", - json={ - "data": [ - { - "id": "model1", - "object": "model", - "created": 1234567890, - "owned_by": "OWNER", - "root": "model1", - }, - ] - }, +def mock_local_models(httpx_mock: HTTPXMock, base_url: str): + mock_response = { + "data": [ + { + "id": "dummy", + "object": "model", + "created": 1234567890, + "owned_by": "OWNER", + "root": "model1", + } + ] + } + + if base_url.endswith("/"): + base_url = base_url[:-1] + + httpx_mock.add_response( + url=f"{base_url}/models", + method="GET", + json=mock_response, + status_code=200, ) @@ -38,19 +41,18 @@ def mock_v1_local_models2(requests_mock: Mocker, base_url: str) -> None: "https://test_url/.../v1", ], ) -def test_base_url_invalid_not_hosted( - base_url: str, mock_v1_local_models2: None -) -> None: - with pytest.raises(ValueError): +def test_base_url_invalid_not_hosted(base_url: str) -> None: + with pytest.raises(ValueError) as msg: Interface(base_url=base_url) + assert "Invalid 
base_url" in str(msg.value) -@pytest.mark.parametrize("base_url", ["http://localhost:8080/v1/chat/completions"]) -def test_base_url_valid_not_hosted(base_url: str, mock_v1_local_models2: None) -> None: +@pytest.mark.parametrize("base_url", ["http://localhost:8080/v1/"]) +def test_base_url_valid_not_hosted(base_url: str, mock_local_models: None) -> None: with pytest.warns(UserWarning): Interface(base_url=base_url) @pytest.mark.parametrize("base_url", ["https://integrate.api.nvidia.com/v1/"]) -def test_base_url_valid_hosted(base_url: str, mock_v1_local_models2: None) -> None: +def test_base_url_valid_hosted(base_url: str) -> None: Interface(base_url=base_url) diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_mode_switch.py b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_mode_switch.py index 2a3734baa9507..225c27d7e37b4 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_mode_switch.py +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_mode_switch.py @@ -2,6 +2,39 @@ from llama_index.llms.nvidia import NVIDIA as Interface from llama_index.llms.nvidia.base import BASE_URL, KNOWN_URLS +from pytest_httpx import HTTPXMock + +UNKNOWN_URLS = [ + "https://test_url/v1", + "https://test_url/v1/", + "http://test_url/v1", + "http://test_url/v1/", +] + + +@pytest.fixture() +def mock_unknown_urls(httpx_mock: HTTPXMock, base_url: str): + mock_response = { + "data": [ + { + "id": "dummy", + "object": "model", + "created": 1234567890, + "owned_by": "OWNER", + "root": "model1", + } + ] + } + + if base_url.endswith("/"): + base_url = base_url[:-1] + + httpx_mock.add_response( + url=f"{base_url}/models", + method="GET", + json=mock_response, + status_code=200, + ) def test_mode_switch_nvidia_throws_without_key_deprecated(masked_env_var: str): @@ -28,13 +61,14 @@ def test_mode_switch_nim_with_url_deprecated(): Interface().mode("nim", base_url="test") -def test_mode_switch_param_setting_deprecated(): +@pytest.mark.parametrize("base_url", ["https://test_url/v1/"]) +def test_mode_switch_param_setting_deprecated(base_url): instance = Interface(model="dummy") with pytest.warns(DeprecationWarning): - instance1 = instance.mode("nim", base_url="https://test_url/v1/") + instance1 = instance.mode("nim", base_url=base_url) assert instance1.model == "dummy" - assert str(instance1.api_base) == "https://test_url/v1/" + assert str(instance1.api_base) == base_url with pytest.warns(DeprecationWarning): instance2 = instance1.mode("nvidia", api_key="test", model="dummy-2") @@ -43,23 +77,17 @@ def test_mode_switch_param_setting_deprecated(): assert instance2.api_key == "test" -UNKNOWN_URLS = [ - "https://test_url/v1", - "https://test_url/v1/", - "http://test_url/v1", - "http://test_url/v1/", -] - - @pytest.mark.parametrize("base_url", UNKNOWN_URLS) -def test_mode_switch_unknown_base_url_without_key(masked_env_var: str, base_url: str): +def test_mode_switch_unknown_base_url_without_key( + mock_unknown_urls, masked_env_var: str, base_url: str +): Interface(base_url=base_url) @pytest.mark.parametrize("base_url", UNKNOWN_URLS) @pytest.mark.parametrize("param", ["nvidia_api_key", "api_key"]) def test_mode_switch_unknown_base_url_with_key( - masked_env_var: str, param: str, base_url: str + mock_unknown_urls, masked_env_var: str, param: str, base_url: str ): Interface(base_url=base_url, **{param: "test"}) diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_nvidia.py 
b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_nvidia.py index 131407f98f798..a8b60d16743e1 100644 --- a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_nvidia.py +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_nvidia.py @@ -15,6 +15,7 @@ from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, ChoiceDelta from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice from openai.types.completion import Completion, CompletionUsage +from pytest_httpx import HTTPXMock class CachedNVIDIApiKeys: @@ -116,6 +117,40 @@ def mock_chat_completion_stream_v1( yield from responses + +@pytest.fixture() +def known_unknown() -> str: + return "mock-model" + + +@pytest.fixture() +def mock_local_models(httpx_mock: HTTPXMock): + mock_response = { + "data": [ + { + "id": "mock-model", + "object": "model", + "created": 1234567890, + "owned_by": "OWNER", + "root": "mock-model", + }, + { + "id": "lora1", + "object": "model", + "created": 1234567890, + "owned_by": "OWNER", + "root": "mock-model", + }, + ] + } + + httpx_mock.add_response( + url="http://localhost:8000/v1/models", + method="GET", + json=mock_response, + status_code=200, + ) + + async def mock_async_chat_completion_stream_v1( *args: Any, **kwargs: Any ) -> AsyncGenerator[Completion, None]: @@ -223,3 +258,22 @@ def test_validates_api_key_is_present() -> None: def test_metadata() -> None: assert isinstance(NVIDIA().metadata, LLMMetadata) + + +def test_default_known(mock_local_models, known_unknown: str) -> None: + """ + Test that the default model is inferred from the locally served models. + """ + # check if default model is getting set + with pytest.warns(UserWarning): + x = NVIDIA(base_url="http://localhost:8000/v1") + assert x.model == known_unknown + + +def test_default_lora() -> None: + """ + Test that an explicitly requested, locally served LoRA model is accepted. + """ + # select the LoRA by name instead of relying on the default + x = NVIDIA(base_url="http://localhost:8000/v1", model="lora1") + assert x.model == "lora1" diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_structured_output.py b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_structured_output.py new file mode 100644 index 0000000000000..2de55dea232d1 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_structured_output.py @@ -0,0 +1,134 @@ +import respx +from httpx import Response +from pydantic import BaseModel +from typing import List +from llama_index.llms.nvidia import NVIDIA as Interface +from llama_index.core.program import LLMTextCompletionProgram +from llama_index.core.program import FunctionCallingProgram +import pytest +from llama_index.llms.nvidia.utils import ( + NVIDIA_FUNTION_CALLING_MODELS, + API_CATALOG_MODELS, +) + + +class Song(BaseModel): + """Data model for a song.""" + + title: str + length_seconds: int + + +class Album(BaseModel): + """Data model for an album.""" + + name: str + artist: str + songs: List[Song] + + +prompt_template_str = """\ Generate an example album, with an artist and a list of songs. 
\ +Using the movie {movie_name} as inspiration.\ +""" + + +def create_mock_chat_completion_v1_response() -> dict: + return { + "id": "chatcmpl-4162e407-e121-42b4-8590-1c173380be7d", + "object": "chat.completion", + "created": 1713474384, + "model": "mocked-model", + "usage": {"completion_tokens": 304, "prompt_tokens": 11, "total_tokens": 315}, + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": { + "content": None, + "text_offset": [], + "token_logprobs": [0.0, 0.0], + "tokens": [], + "top_logprobs": [], + }, + "message": { + "content": """{ + "name": "Greatest Hits", + "artist": "Best Artist", + "songs": [ + {"title": "Hit Song 1", "length_seconds": 180}, + {"title": "Hit Song 2", "length_seconds": 210} + ] + }""", + "role": "assistant", + "function_call": None, + "tool_calls": None, + }, + } + ], + } + + +@respx.mock +@pytest.mark.parametrize("model", NVIDIA_FUNTION_CALLING_MODELS) +def test_prompt_generation(model): + respx.post("https://integrate.api.nvidia.com/v1/chat/completions").mock( + return_value=Response(200, json=create_mock_chat_completion_v1_response()) + ) + + llm = Interface(api_key="BOGUS", model=model) + program = LLMTextCompletionProgram.from_defaults( + output_cls=Album, prompt_template_str=prompt_template_str, verbose=True, llm=llm + ) + + output = program(movie_name="The Shining") + assert isinstance(output, Album), f"Expected Album, but got {type(output)}" + assert isinstance(output.name, str), "Name should be a string" + assert isinstance(output.artist, str), "artist should be a string" + assert isinstance(output.songs, list), "Songs should be a list" + assert all( + isinstance(song, Song) for song in output.songs + ), "All songs should be of type Song" + + assert len(output.songs) > 0, "Album should contain at least one song" + + +@pytest.mark.parametrize( + "model", API_CATALOG_MODELS.keys() - NVIDIA_FUNTION_CALLING_MODELS +) +def test_unsupported_models(model: str): + llm = Interface(api_key="BOGUS", model=model) + + with pytest.raises(ValueError) as e: + FunctionCallingProgram.from_defaults( + output_cls=Album, + prompt_template_str=prompt_template_str, + verbose=True, + llm=llm, + ) + assert f"{model} does not support function calling API." 
in str(e.value) + + +@pytest.mark.asyncio() +@respx.mock +@pytest.mark.parametrize("model", NVIDIA_FUNTION_CALLING_MODELS) +async def test_async_program(model) -> None: + respx.post("https://integrate.api.nvidia.com/v1/chat/completions").mock( + return_value=Response(200, json=create_mock_chat_completion_v1_response()) + ) + + llm = Interface(api_key="BOGUS", model=model) + program = LLMTextCompletionProgram.from_defaults( + output_cls=Album, prompt_template_str=prompt_template_str, verbose=True, llm=llm + ) + + output = program(movie_name="The Shining") + assert isinstance(output, Album), f"Expected Album, but got {type(output)}" + assert isinstance(output.name, str), "Name should be a string" + assert isinstance(output.artist, str), "artist should be a string" + assert isinstance(output.songs, list), "Songs should be a list" + assert all( + isinstance(song, Song) for song in output.songs + ), "All songs should be of type Song" + + assert len(output.songs) > 0, "Album should contain at least one song" diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_text-completion.py b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_text-completion.py new file mode 100644 index 0000000000000..6a3d223e01571 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_text-completion.py @@ -0,0 +1,167 @@ +import os +from typing import Any, Optional, Generator, AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from llama_index.core.base.llms.types import ( + LLMMetadata, +) +from llama_index.llms.nvidia import NVIDIA as Interface +from llama_index.llms.nvidia.utils import COMPLETION_MODELS +from openai.types.completion import Completion, CompletionChoice, CompletionUsage + + +class CachedNVIDIApiKeys: + def __init__(self, set_env_key_to: Optional[str] = "", set_fake_key: bool = False): + self.set_env_key_to = set_env_key_to + self.set_fake_key = set_fake_key + + def __enter__(self) -> None: + self.api_env_was = os.environ.get("NVIDIA_API_KEY", "") + os.environ["NVIDIA_API_KEY"] = self.set_env_key_to + + if self.set_fake_key: + os.environ["NVIDIA_API_KEY"] = "nvai-" + "x" * 9 + "-" + "x" * 54 + + def __exit__(self, *exc: object) -> None: + if self.api_env_was == "": + del os.environ["NVIDIA_API_KEY"] + else: + os.environ["NVIDIA_API_KEY"] = self.api_env_was + + +def mock_completion_v1(*args, **kwargs) -> Completion: + model_name = kwargs.get("model") + return Completion( + id="cmpl-4162e407-e121-42b4-8590-1c173380be7d", + object="text_completion", + created=1713474384, + model=model_name, + usage=CompletionUsage( + completion_tokens=304, prompt_tokens=11, total_tokens=315 + ), + choices=[ + CompletionChoice( + finish_reason="stop", index=0, text="Cool Test Message", logprobs=None + ) + ], + ) + + +async def mock_async_completion_v1(*args: Any, **kwargs: Any) -> Completion: + return mock_completion_v1(*args, **kwargs) + + +def mock_completion_stream_v1(*args, **kwargs) -> Generator[Completion, None, None]: + model_name = kwargs.get("model") + responses = [ + Completion( + id="chatcmpl-998d9b96-0b71-41f5-b910-dd3bc00f38c6", + object="text_completion", + created=1713474736, + model=model_name, + choices=[CompletionChoice(text="Test", finish_reason="stop", index=0)], + ), + Completion( + id="chatcmpl-998d9b96-0b71-41f5-b910-dd3bc00f38c6", + object="text_completion", + created=1713474736, + model="google/gemma-7b", + choices=[ + CompletionChoice(text="Second Test", finish_reason="stop", index=0) + ], + ), + 
] + + yield from responses + + +async def mock_async_completion_stream_v1( + *args: Any, **kwargs: Any +) -> AsyncGenerator[Completion, None]: + async def gen() -> AsyncGenerator[Completion, None]: + for response in mock_completion_stream_v1(*args, **kwargs): + yield response + + return gen() + + +@patch("llama_index.llms.openai.base.SyncOpenAI") +@pytest.mark.parametrize("model", COMPLETION_MODELS) +def test_model_completions(MockSyncOpenAI: MagicMock, model: str) -> None: + with CachedNVIDIApiKeys(set_fake_key=True): + mock_instance = MockSyncOpenAI.return_value + mock_instance.completions.create.return_value = mock_completion_v1(model=model) + + llm = Interface(model=model) + prompt = "test prompt" + + response = llm.complete(prompt) + assert response.text == "Cool Test Message" + + +def test_validates_api_key_is_present() -> None: + with CachedNVIDIApiKeys(set_fake_key=True): + assert Interface() + + os.environ["NVIDIA_API_KEY"] = "" + + assert Interface(api_key="nvai-" + "x" * 9 + "-" + "x" * 54) + + +def test_metadata() -> None: + assert isinstance(Interface().metadata, LLMMetadata) + + +@patch("llama_index.llms.openai.base.SyncOpenAI") +@pytest.mark.parametrize("model", COMPLETION_MODELS) +def test_completions_model_streaming(MockSyncOpenAI: MagicMock, model: str) -> None: + with CachedNVIDIApiKeys(set_fake_key=True): + mock_instance = MockSyncOpenAI.return_value + mock_instance.completions.create.return_value = mock_completion_stream_v1( + model=model + ) + + llm = Interface(model=model) + prompt = "test prompt" + + response_gen = llm.stream_complete(prompt) + responses = list(response_gen) + assert responses[-1].text == "TestSecond Test" + + +@pytest.mark.asyncio() +@patch("llama_index.llms.openai.base.AsyncOpenAI") +@pytest.mark.parametrize("model", COMPLETION_MODELS) +async def test_async_model_completions(MockAsyncOpenAI: MagicMock, model: str) -> None: + with CachedNVIDIApiKeys(set_fake_key=True): + mock_instance = MockAsyncOpenAI.return_value + create_fn = AsyncMock(call_args=(model,)) + create_fn.side_effect = mock_async_completion_v1 + mock_instance.completions.create = create_fn + + llm = Interface(model=model) + prompt = "test prompt" + + response = await llm.acomplete(prompt) + assert response.text == "Cool Test Message" + + +@pytest.mark.asyncio() +@patch("llama_index.llms.openai.base.AsyncOpenAI") +@pytest.mark.parametrize("model", COMPLETION_MODELS) +async def test_async_streaming_completion_model( + MockAsyncOpenAI: MagicMock, model: str +) -> None: + with CachedNVIDIApiKeys(set_fake_key=True): + mock_instance = MockAsyncOpenAI.return_value + create_fn = AsyncMock(call_args=(model,)) + create_fn.side_effect = mock_async_completion_stream_v1 + mock_instance.completions.create = create_fn + + llm = Interface(model=model) + prompt = "test prompt" + + response_gen = await llm.astream_complete(prompt) + responses = [response async for response in response_gen] + assert responses[-1].text == "TestSecond Test" diff --git a/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_tools.py b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_tools.py new file mode 100644 index 0000000000000..96e45cfecdd16 --- /dev/null +++ b/llama-index-integrations/llms/llama-index-llms-nvidia/tests/test_tools.py @@ -0,0 +1,421 @@ +import json +import uuid +from typing import ( + Any, + AsyncGenerator, + Generator, + List, +) +from unittest.mock import MagicMock, patch +from llama_index.core.base.agent.types import TaskStepOutput +import pytest +from 
llama_index.core.base.llms.types import ( + ChatMessage, +) +from llama_index.core.chat_engine.types import ( + AgentChatResponse, + StreamingAgentChatResponse, +) +from llama_index.core.tools.function_tool import FunctionTool +from llama_index.llms.nvidia import NVIDIA +from openai.types.chat.chat_completion import ChatCompletion, Choice +from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice +from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, ChoiceDelta +from openai.types.chat.chat_completion_message import ChatCompletionMessage +from openai.types.chat.chat_completion_message_tool_call import ( + ChatCompletionMessageToolCall, + Function, +) +from llama_index.core.agent import FunctionCallingAgentWorker + + +def mock_chat_completion(*args: Any, **kwargs: Any) -> ChatCompletion: + if "functions" in kwargs: + if not kwargs["functions"]: + raise ValueError("functions must not be empty") + + # Example taken from https://platform.openai.com/docs/api-reference/chat/create + return ChatCompletion( + id="chatcmpl-abc123", + object="chat.completion", + created=1677858242, + model="meta/llama-3.1-8b-instruct", + usage={"prompt_tokens": 13, "completion_tokens": 7, "total_tokens": 20}, + choices=[ + Choice( + message=ChatCompletionMessage( + role="assistant", content="\n\nThis is a test!" + ), + finish_reason="stop", + index=0, + logprobs=None, + ) + ], + ) + + +def mock_chat_completion_tool_call( + function: Function, *args: Any, **kwargs: Any +) -> ChatCompletion: + # Example taken from https://platform.openai.com/docs/api-reference/chat/create + return ChatCompletion( + id="chatcmpl-abc123", + object="chat.completion", + created=1677858242, + model="meta/llama-3.1-8b-instruct", + usage={"prompt_tokens": 13, "completion_tokens": 7, "total_tokens": 20}, + choices=[ + Choice( + message=ChatCompletionMessage( + role="assistant", + content="\n\nThis is a test!", + tool_calls=[ + ChatCompletionMessageToolCall( + id="toolcall-abc123", + function=function, + type="function", + ) + ], + ), + finish_reason="stop", + index=0, + logprobs=None, + ) + ], + ) + + +def mock_chat_stream( + *args: Any, **kwargs: Any +) -> Generator[ChatCompletionChunk, None, None]: + if "functions" in kwargs: + if not kwargs["functions"]: + raise ValueError("functions must not be empty") + + yield ChatCompletionChunk( + id="chatcmpl-abc123", + object="chat.completion.chunk", + created=1677858242, + model="meta/llama-3.1-8b-instruct", + choices=[ + ChunkChoice( + delta=ChoiceDelta(role="assistant", content="\n\nThis is a test!"), + finish_reason="stop", + index=0, + logprobs=None, + ) + ], + ) + + +@pytest.mark.asyncio() +async def mock_achat_completion(*args: Any, **kwargs: Any) -> ChatCompletion: + return mock_chat_completion(*args, **kwargs) + + +@pytest.mark.asyncio() +async def mock_achat_completion_tool_call( + function: Function, *args: Any, **kwargs: Any +) -> ChatCompletion: + return mock_chat_completion_tool_call(function, *args, **kwargs) + + +@pytest.mark.asyncio() +async def mock_achat_stream( + *args: Any, **kwargs: Any +) -> AsyncGenerator[ChatCompletionChunk, None]: + async def _mock_achat_stream( + *args: Any, **kwargs: Any + ) -> AsyncGenerator[ChatCompletionChunk, None]: + if "functions" in kwargs: + if not kwargs["functions"]: + raise ValueError("functions must not be empty") + + yield ChatCompletionChunk( + id="chatcmpl-abc123", + object="chat.completion.chunk", + created=1677858242, + model="gpt-3.5-turbo-0301", + choices=[ + ChunkChoice( + 
delta=ChoiceDelta(role="assistant", content="\n\nThis is a test!"), + finish_reason="stop", + index=0, + logprobs=None, + ) + ], + ) + + return _mock_achat_stream(*args, **kwargs) + + +@pytest.fixture() +def add_tool() -> FunctionTool: + def add(a: int, b: int) -> int: + """Add two integers and returns the result integer.""" + return a + b + + return FunctionTool.from_defaults(fn=add) + + +@pytest.fixture() +def echo_tool() -> FunctionTool: + def echo(query: str) -> str: + """Echos input.""" + return query + + return FunctionTool.from_defaults(fn=echo) + + +@pytest.fixture() +def malformed_echo_function() -> Function: + test_result: str = "This is a test" + return Function(name="echo", arguments=f'query = "{test_result}"') + + +@pytest.fixture() +def echo_function() -> Function: + test_result: str = "This is a test" + return Function(name="echo", arguments=json.dumps({"query": test_result})) + + +MOCK_ACTION_RESPONSE = """\ +Thought: I need to use a tool to help me answer the question. +Action: add +Action Input: {"a": 1, "b": 1} +""" + +MOCK_FINAL_RESPONSE = """\ +Thought: I have enough information to answer the question without using any more tools. +Answer: 2 +""" + + +@patch("llama_index.llms.openai.base.SyncOpenAI") +def test_chat_basic( + MockSyncOpenAI: MagicMock, add_tool: FunctionTool, masked_env_var +) -> None: + mock_instance = MockSyncOpenAI.return_value + mock_instance.chat.completions.create.return_value = mock_chat_completion() + + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + + agent = FunctionCallingAgentWorker.from_tools( + tools=[add_tool], + llm=llm, + ).as_agent() + response = agent.chat("What is 1 + 1?") + assert isinstance(response, AgentChatResponse) + assert response.response == "\n\nThis is a test!" + assert len(agent.chat_history) == 2 + assert agent.chat_history[0].content == "What is 1 + 1?" + assert agent.chat_history[1].content == "\n\nThis is a test!" + + +@pytest.mark.asyncio() +@patch("llama_index.llms.openai.base.AsyncOpenAI") +async def test_achat_basic( + MockAsyncOpenAI: MagicMock, add_tool: FunctionTool, masked_env_var +) -> None: + mock_instance = MockAsyncOpenAI.return_value + mock_instance.chat.completions.create.return_value = mock_achat_completion() + + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + + agent = FunctionCallingAgentWorker.from_tools( + tools=[add_tool], + llm=llm, + ).as_agent() + response = await agent.achat("What is 1 + 1?") + assert isinstance(response, AgentChatResponse) + assert response.response == "\n\nThis is a test!" + assert len(agent.chat_history) == 2 + assert agent.chat_history[0].content == "What is 1 + 1?" + assert agent.chat_history[1].content == "\n\nThis is a test!" 
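For orientation between these mocked agent tests, a minimal sketch of the tool-call extraction path they exercise, using only APIs that appear in this diff; the "echo" arguments and the BOGUS key are illustrative, and the sketch is an assumed usage pattern rather than part of the change set:

from llama_index.core.base.llms.types import ChatMessage, ChatResponse, MessageRole
from llama_index.llms.nvidia import NVIDIA
from openai.types.chat.chat_completion_message_tool_call import (
    ChatCompletionMessageToolCall,
    Function,
)

# Shape a ChatResponse the way the mocked OpenAI-style payloads above do.
llm = NVIDIA(api_key="BOGUS", model="meta/llama-3.1-8b-instruct")
response = ChatResponse(
    message=ChatMessage(
        role=MessageRole.ASSISTANT,
        content="",
        additional_kwargs={
            "tool_calls": [
                ChatCompletionMessageToolCall(
                    id="toolcall-abc123",
                    type="function",
                    function=Function(name="echo", arguments='{"query": "hi"}'),
                )
            ]
        },
    )
)
# get_tool_calls_from_response (added to the NVIDIA class above) parses each
# tool call's JSON arguments into a ToolSelection.
selections = llm.get_tool_calls_from_response(response)
assert selections[0].tool_name == "echo"
assert selections[0].tool_kwargs == {"query": "hi"}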
+ + +@pytest.mark.xfail( + reason="streaming not yet implemented, see https://github.com/run-llama/llama_index/discussions/14653 and https://github.com/run-llama/llama_index/issues/15079" +) +@pytest.mark.asyncio() +@patch("llama_index.llms.openai.base.AsyncOpenAI") +async def test_astream_chat_basic( + MockAsyncOpenAI: MagicMock, add_tool: FunctionTool, masked_env_var +) -> None: + mock_instance = MockAsyncOpenAI.return_value + mock_instance.chat.completions.create.side_effect = mock_achat_stream + + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + + agent = FunctionCallingAgentWorker.from_tools( + tools=[add_tool], + llm=llm, + ).as_agent() + response_stream = await agent.astream_chat("What is 1 + 1?") + async for response in response_stream.async_response_gen(): + pass + assert isinstance(response_stream, StreamingAgentChatResponse) + # str() strips newline values + assert response == "\n\nThis is a test!" + assert len(agent.chat_history) == 2 + assert agent.chat_history[0].content == "What is 1 + 1?" + assert agent.chat_history[1].content == "This is a test!" + + +@patch("llama_index.llms.openai.base.SyncOpenAI") +def test_chat_no_functions(MockSyncOpenAI: MagicMock, masked_env_var) -> None: + mock_instance = MockSyncOpenAI.return_value + mock_instance.chat.completions.create.return_value = mock_chat_completion() + + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + + agent = FunctionCallingAgentWorker.from_tools( + llm=llm, + ).as_agent() + response = agent.chat("What is 1 + 1?") + assert isinstance(response, AgentChatResponse) + assert response.response == "\n\nThis is a test!" + + +@patch("llama_index.llms.openai.base.SyncOpenAI") +def test_add_step( + MockSyncOpenAI: MagicMock, add_tool: FunctionTool, masked_env_var +) -> None: + """Test add step.""" + mock_instance = MockSyncOpenAI.return_value + mock_instance.chat.completions.create.return_value = mock_chat_completion() + + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + # sync + agent = FunctionCallingAgentWorker.from_tools( + tools=[add_tool], + llm=llm, + ).as_agent() + ## NOTE: can only take a single step before finishing, + # since mocked chat output does not call any tools + task = agent.create_task("What is 1 + 1?") + step_output = agent.run_step(task.task_id) + assert str(step_output) == "\n\nThis is a test!" 
+ + # add human input (not used but should be in memory) + task = agent.create_task("What is 1 + 1?") + step_output = agent.run_step(task.task_id, input="tmp") + chat_history: List[ChatMessage] = task.extra_state["new_memory"].get_all() + assert "tmp" in [m.content for m in chat_history] + + # # stream_step + # agent = FunctionCallingAgentWorker.from_tools( + # tools=[add_tool], + # llm=llm, + # ) + # task = agent.create_task("What is 1 + 1?") + # # first step + # step_output = agent.stream_step(task.task_id) + # # add human input (not used but should be in memory) + # step_output = agent.stream_step(task.task_id, input="tmp") + # chat_history: List[ChatMessage] = task.extra_state["new_memory"].get_all() + # assert "tmp" in [m.content for m in chat_history] + + +@pytest.mark.xfail( + reason="streaming not yet implemented, see https://github.com/run-llama/llama_index/discussions/14653 and https://github.com/run-llama/llama_index/issues/15079" +) +@pytest.mark.asyncio() +@patch("llama_index.llms.openai.base.AsyncOpenAI") +async def test_async_add_step( + MockAsyncOpenAI: MagicMock, add_tool: FunctionTool, masked_env_var +) -> None: + mock_instance = MockAsyncOpenAI.return_value + + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + # async + agent = FunctionCallingAgentWorker.from_tools( + tools=[add_tool], + llm=llm, + ).as_agent() + task = agent.create_task("What is 1 + 1?") + # first step + mock_instance.chat.completions.create.return_value = mock_achat_completion() + step_output = await agent.arun_step(task.task_id) + # add human input (not used but should be in memory) + task = agent.create_task("What is 1 + 1?") + mock_instance.chat.completions.create.return_value = mock_achat_completion() + step_output = await agent.arun_step(task.task_id, input="tmp") + chat_history: List[ChatMessage] = task.extra_state["new_memory"].get_all() + assert "tmp" in [m.content for m in chat_history] + + # async stream step + agent = FunctionCallingAgentWorker.from_tools( + tools=[add_tool], + llm=llm, + ).as_agent() + task = agent.create_task("What is 1 + 1?") + # first step + mock_instance.chat.completions.create.side_effect = mock_achat_stream + step_output = await agent.astream_step(task.task_id) + # add human input (not used but should be in memory) + task = agent.create_task("What is 1 + 1?") + mock_instance.chat.completions.create.side_effect = mock_achat_stream + + # stream the output to ensure it gets written to memory + step_output = await agent.astream_step(task.task_id, input="tmp") + async for _ in step_output.output.async_response_gen(): + pass + + chat_history = task.memory.get_all() + assert "tmp" in [m.content for m in chat_history] + + +@pytest.mark.asyncio() +@pytest.mark.parametrize("method", ["run_step", "arun_step"]) +@patch("llama_index.llms.openai.base.SyncOpenAI") +@patch("llama_index.llms.openai.base.AsyncOpenAI") +async def test_run_step_returns_correct_sources_history( + MockAsyncOpenAI: MagicMock, + MockSyncOpenAI: MagicMock, + method: str, + echo_tool: FunctionTool, + echo_function: Function, + masked_env_var, +) -> None: + num_steps = 4 + llm = NVIDIA(model="meta/llama-3.1-8b-instruct") + agent = FunctionCallingAgentWorker.from_tools( + tools=[echo_tool], + llm=llm, + ).as_agent() + task = agent.create_task("") + step_outputs: List[TaskStepOutput] = [] + + if method == "run_step": + mock_instance = MockSyncOpenAI.return_value + mock_instance.chat.completions.create.return_value = ( + mock_chat_completion_tool_call(echo_function) + ) + else: + mock_instance = 
MockAsyncOpenAI.return_value + mock_instance.chat.completions.create.side_effect = [ + mock_achat_completion_tool_call(echo_function) for _ in range(num_steps) + ] + + # Create steps + steps = [agent.agent_worker.initialize_step(task)] + for step_idx in range(num_steps - 1): + steps.append( + steps[-1].get_next_step( + step_id=str(uuid.uuid4()), + input=None, + ) + ) + + # Run each step, invoking a single tool call each time + for step_idx in range(num_steps): + step_outputs.append( + agent.agent_worker.run_step(steps[step_idx], task) + if method == "run_step" + else await agent.agent_worker.arun_step(steps[step_idx], task) + ) + + # Ensure that each step only has one source for its one tool call + for step_idx in range(num_steps): + assert len(step_outputs[step_idx].output.sources) == 1 diff --git a/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py b/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py index 4264a0ffb2271..50d44a6ca0554 100644 --- a/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py +++ b/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py @@ -184,6 +184,20 @@ def _convert_to_ollama_messages(self, messages: Sequence[ChatMessage]) -> Dict: for message in messages ] + def _get_response_token_counts(self, raw_response: dict) -> dict: + """Get the token usage reported by the response.""" + try: + prompt_tokens = raw_response["prompt_eval_count"] + completion_tokens = raw_response["eval_count"] + total_tokens = prompt_tokens + completion_tokens + except KeyError: + return {} + return { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens, + "total_tokens": total_tokens, + } + def _prepare_chat_with_tools( self, tools: List["BaseTool"], @@ -268,6 +282,9 @@ def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: ) tool_calls = response["message"].get("tool_calls", []) + token_counts = self._get_response_token_counts(response) + if token_counts: + response["usage"] = token_counts return ChatResponse( message=ChatMessage( @@ -305,6 +322,9 @@ def gen() -> ChatResponseGen: response_txt += r["message"]["content"] tool_calls = r["message"].get("tool_calls", []) + token_counts = self._get_response_token_counts(r) + if token_counts: + r["usage"] = token_counts yield ChatResponse( message=ChatMessage( @@ -345,6 +365,9 @@ async def gen() -> ChatResponseAsyncGen: response_txt += r["message"]["content"] tool_calls = r["message"].get("tool_calls", []) + token_counts = self._get_response_token_counts(r) + if token_counts: + r["usage"] = token_counts yield ChatResponse( message=ChatMessage( @@ -376,6 +399,9 @@ async def achat( ) tool_calls = response["message"].get("tool_calls", []) + token_counts = self._get_response_token_counts(response) + if token_counts: + response["usage"] = token_counts return ChatResponse( message=ChatMessage( diff --git a/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml index 4b517f661a11c..1e9e1a34c8d17 100644 --- a/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-ollama" readme = "README.md" -version = "0.3.0" +version = "0.3.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git 
a/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py b/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py index c0b072ab85fd7..6a57226b5b237 100644 --- a/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py +++ b/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py @@ -2,6 +2,7 @@ from typing import ( TYPE_CHECKING, Any, + Generator, Awaitable, Callable, Dict, @@ -49,8 +50,9 @@ ) from llama_index.core.llms.function_calling import FunctionCallingLLM from llama_index.core.llms.llm import ToolSelection -from llama_index.core.types import BaseOutputParser, PydanticProgramMode +from llama_index.core.types import BaseOutputParser, PydanticProgramMode, Model from llama_index.llms.openai.utils import ( + O1_MODELS, OpenAIToolCall, create_retry_decorator, from_openai_completion_logprobs, @@ -62,6 +64,9 @@ resolve_openai_credentials, to_openai_message_dicts, ) +from llama_index.core.bridge.pydantic import ( + BaseModel, +) from openai import AsyncOpenAI, AzureOpenAI from openai import OpenAI as SyncOpenAI @@ -72,6 +77,10 @@ ) from llama_index.core.llms.utils import parse_partial_json +import llama_index.core.instrumentation as instrument + +dispatcher = instrument.get_dispatcher(__name__) + if TYPE_CHECKING: from llama_index.core.tools.types import BaseTool @@ -158,8 +167,8 @@ class OpenAI(FunctionCallingLLM): temperature: float = Field( default=DEFAULT_TEMPERATURE, description="The temperature to use during generation.", - gte=0.0, - lte=1.0, + ge=0.0, + le=1.0, ) max_tokens: Optional[int] = Field( description="The maximum number of tokens to generate.", @@ -172,8 +181,8 @@ class OpenAI(FunctionCallingLLM): top_logprobs: int = Field( description="The number of top token log probs to return.", default=0, - gte=0, - lte=20, + ge=0, + le=20, ) additional_kwargs: Dict[str, Any] = Field( default_factory=dict, description="Additional kwargs for the OpenAI API." @@ -181,12 +190,12 @@ class OpenAI(FunctionCallingLLM): max_retries: int = Field( default=3, description="The maximum number of API retries.", - gte=0, + ge=0, ) timeout: float = Field( default=60.0, description="The timeout, in seconds, for API requests.", - gte=0, + ge=0, ) default_headers: Optional[Dict[str, str]] = Field( default=None, description="The default headers for API requests." 
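An aside on the gte/lte changes just above: ge/le are the spellings Pydantic actually recognizes for inclusive numeric bounds, so the old gte/lte keywords left those limits unenforced. A minimal standalone sketch of the corrected pattern; the SamplingConfig class is illustrative, not from the diff:

from pydantic import BaseModel, Field, ValidationError

class SamplingConfig(BaseModel):
    # ge/le are inclusive lower/upper bounds enforced at validation time.
    temperature: float = Field(default=0.1, ge=0.0, le=1.0)
    top_logprobs: int = Field(default=0, ge=0, le=20)

SamplingConfig(temperature=0.5)  # within bounds, validates
try:
    SamplingConfig(temperature=1.5)  # out of bounds
except ValidationError as err:
    print(err)  # "Input should be less than or equal to 1"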
@@ -245,6 +254,10 @@ def __init__( api_version=api_version, ) + # TODO: Temp forced to 1.0 for o1 + if model in O1_MODELS: + temperature = 1.0 + super().__init__( model=model, temperature=temperature, @@ -323,6 +336,10 @@ def metadata(self) -> LLMMetadata: model=self._get_model_name() ), model_name=self.model, + # TODO: Temp for O1 beta + system_role=MessageRole.USER + if self.model in O1_MODELS + else MessageRole.SYSTEM, ) @llm_chat_callback() @@ -402,7 +419,7 @@ def _get_model_kwargs(self, **kwargs: Any) -> Dict[str, Any]: @llm_retry_decorator def _chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: client = self._get_client() - message_dicts = to_openai_message_dicts(messages) + message_dicts = to_openai_message_dicts(messages, model=self.model) if self.reuse_client: response = client.chat.completions.create( @@ -484,7 +501,7 @@ def _stream_chat( self, messages: Sequence[ChatMessage], **kwargs: Any ) -> ChatResponseGen: client = self._get_client() - message_dicts = to_openai_message_dicts(messages) + message_dicts = to_openai_message_dicts(messages, model=self.model) def gen() -> ChatResponseGen: content = "" @@ -574,7 +591,7 @@ def gen() -> CompletionResponseGen: text = "" for response in client.completions.create( prompt=prompt, - **kwargs, + **all_kwargs, ): if len(response.choices) > 0: delta = response.choices[0].text @@ -690,7 +707,7 @@ async def _achat( self, messages: Sequence[ChatMessage], **kwargs: Any ) -> ChatResponse: aclient = self._get_aclient() - message_dicts = to_openai_message_dicts(messages) + message_dicts = to_openai_message_dicts(messages, model=self.model) if self.reuse_client: response = await aclient.chat.completions.create( @@ -723,7 +740,7 @@ async def _astream_chat( self, messages: Sequence[ChatMessage], **kwargs: Any ) -> ChatResponseAsyncGen: aclient = self._get_aclient() - message_dicts = to_openai_message_dicts(messages) + message_dicts = to_openai_message_dicts(messages, model=self.model) async def gen() -> ChatResponseAsyncGen: content = "" @@ -940,3 +957,55 @@ def get_tool_calls_from_response( ) return tool_selections + + @dispatcher.span + def structured_predict( + self, *args: Any, llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any + ) -> BaseModel: + """Structured predict.""" + llm_kwargs = llm_kwargs or {} + llm_kwargs["tool_choice"] = ( + "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"] + ) + # by default structured prediction uses function calling to extract structured outputs + # here we force tool_choice to be required + return super().structured_predict(*args, llm_kwargs=llm_kwargs, **kwargs) + + @dispatcher.span + async def astructured_predict( + self, *args: Any, llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any + ) -> BaseModel: + """Structured predict.""" + llm_kwargs = llm_kwargs or {} + llm_kwargs["tool_choice"] = ( + "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"] + ) + # by default structured prediction uses function calling to extract structured outputs + # here we force tool_choice to be required + return await super().astructured_predict(*args, llm_kwargs=llm_kwargs, **kwargs) + + @dispatcher.span + def stream_structured_predict( + self, *args: Any, llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any + ) -> Generator[Union[Model, List[Model]], None, None]: + """Stream structured predict.""" + llm_kwargs = llm_kwargs or {} + llm_kwargs["tool_choice"] = ( + "required" if "tool_choice" not in llm_kwargs else 
llm_kwargs["tool_choice"] + ) + # by default structured prediction uses function calling to extract structured outputs + # here we force tool_choice to be required + return super().stream_structured_predict(*args, llm_kwargs=llm_kwargs, **kwargs) + + @dispatcher.span + def stream_structured_predict( + self, *args: Any, llm_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any + ) -> Generator[Union[Model, List[Model]], None, None]: + """Stream structured predict.""" + llm_kwargs = llm_kwargs or {} + llm_kwargs["tool_choice"] = ( + "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"] + ) + # by default structured prediction uses function calling to extract structured outputs + # here we force tool_choice to be required + return super().stream_structured_predict(*args, llm_kwargs=llm_kwargs, **kwargs) diff --git a/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/utils.py b/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/utils.py index fa1db91a1ac7d..ead7400c9b8b1 100644 --- a/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/utils.py +++ b/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/utils.py @@ -29,6 +29,10 @@ DEFAULT_OPENAI_API_BASE = "https://api.openai.com/v1" DEFAULT_OPENAI_API_VERSION = "" +O1_MODELS: Dict[str, int] = { + "o1-preview": 128000, + "o1-mini": 128000, +} GPT4_MODELS: Dict[str, int] = { # stable model names: @@ -109,6 +113,7 @@ } ALL_AVAILABLE_MODELS = { + **O1_MODELS, **GPT4_MODELS, **TURBO_MODELS, **GPT3_5_MODELS, @@ -117,6 +122,7 @@ } CHAT_MODELS = { + **O1_MODELS, **GPT4_MODELS, **TURBO_MODELS, **AZURE_TURBO_MODELS, @@ -220,11 +226,15 @@ def is_chat_model(model: str) -> bool: def is_function_calling_model(model: str) -> bool: is_chat_model_ = is_chat_model(model) is_old = "0314" in model or "0301" in model - return is_chat_model_ and not is_old + + # TODO: This is temporary for openai's beta + is_o1_beta = "o1" in model + + return is_chat_model_ and not is_old and not is_o1_beta def to_openai_message_dict( - message: ChatMessage, drop_none: bool = False + message: ChatMessage, drop_none: bool = False, model: Optional[str] = None ) -> ChatCompletionMessageParam: """Convert generic message to OpenAI message dict.""" message_dict = { @@ -232,6 +242,11 @@ def to_openai_message_dict( "content": message.content, } + # TODO: O1 models do not support system prompts + if model is not None and model in O1_MODELS: + if message_dict["role"] == "system": + message_dict["role"] = "user" + # NOTE: openai messages have additional arguments: # - function messages have `name` # - assistant messages have optional `function_call` @@ -247,11 +262,14 @@ def to_openai_message_dict( def to_openai_message_dicts( - messages: Sequence[ChatMessage], drop_none: bool = False + messages: Sequence[ChatMessage], + drop_none: bool = False, + model: Optional[str] = None, ) -> List[ChatCompletionMessageParam]: """Convert generic messages to OpenAI message dicts.""" return [ - to_openai_message_dict(message, drop_none=drop_none) for message in messages + to_openai_message_dict(message, drop_none=drop_none, model=model) + for message in messages ] diff --git a/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml index fe2d559d3ddce..2a66303751696 100644 --- a/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml +++ 
b/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml @@ -29,12 +29,13 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-openai" readme = "README.md" -version = "0.2.0" +version = "0.2.6" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" openai = "^1.40.0" -llama-index-core = "^0.11.0" +llama-index-core = "^0.11.7" +llama-index-agent-openai = "^0.3.1" [tool.poetry.group.dev.dependencies] ipython = "8.10.0" diff --git a/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/llama_index/llms/sagemaker_endpoint/base.py b/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/llama_index/llms/sagemaker_endpoint/base.py index 8de20bc525513..a419a2a6bf46a 100644 --- a/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/llama_index/llms/sagemaker_endpoint/base.py +++ b/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/llama_index/llms/sagemaker_endpoint/base.py @@ -163,6 +163,10 @@ def __init__( model_kwargs=model_kwargs, content_handler=content_handler, profile_name=profile_name, + region_name=region_name, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, timeout=timeout, max_retries=max_retries, callback_manager=callback_manager, diff --git a/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/pyproject.toml index 65a68efbb98f6..03b44a9a7a4f7 100644 --- a/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-sagemaker-endpoint/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-sagemaker-endpoint" readme = "README.md" -version = "0.2.1" +version = "0.2.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/utils.py b/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/utils.py index 2d5ed11466657..8e85fe5bad5c0 100644 --- a/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/utils.py +++ b/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/utils.py @@ -47,6 +47,7 @@ def _create_retry_decorator(max_retries: int) -> Callable[[Any], Any]: | retry_if_exception_type(google.api_core.exceptions.ResourceExhausted) | retry_if_exception_type(google.api_core.exceptions.Aborted) | retry_if_exception_type(google.api_core.exceptions.DeadlineExceeded) + | retry_if_exception_type(google.api_core.exceptions.InternalServerError) ), before_sleep=before_sleep_log(logger, logging.WARNING), ) diff --git a/llama-index-integrations/llms/llama-index-llms-vertex/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-vertex/pyproject.toml index dbcbdee2c87a4..1b9de3579ca7f 100644 --- a/llama-index-integrations/llms/llama-index-llms-vertex/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-vertex/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-vertex" readme = "README.md" -version = "0.3.3" +version = "0.3.4" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/base.py 
b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/base.py index 69924c3d539c0..78027ad599323 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/base.py +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/base.py @@ -22,7 +22,7 @@ MultiModalLLM, MultiModalLLMMetadata, ) -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode from llama_index.multi_modal_llms.anthropic.utils import ( ANTHROPIC_MULTI_MODAL_MODELS, generate_anthropic_multi_modal_chat_message, @@ -149,7 +149,7 @@ def _get_multi_modal_chat_messages( self, prompt: str, role: str, - image_documents: Sequence[ImageDocument], + image_documents: Sequence[ImageNode], **kwargs: Any, ) -> List[Dict]: return generate_anthropic_multi_modal_chat_message( @@ -187,7 +187,7 @@ def _get_response_token_counts(self, raw_response: Any) -> dict: } def _complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -208,7 +208,7 @@ def _complete( ) def _stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -239,12 +239,12 @@ def gen() -> CompletionResponseGen: return gen() def complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: return self._complete(prompt, image_documents, **kwargs) def stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: return self._stream_complete(prompt, image_documents, **kwargs) @@ -263,7 +263,7 @@ def stream_chat( # ===== Async Endpoints ===== async def _acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -283,12 +283,12 @@ async def _acomplete( ) async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: return await self._acomplete(prompt, image_documents, **kwargs) async def _astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -319,7 +319,7 @@ async def gen() -> CompletionResponseAsyncGen: return gen() async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: return 
await self._astream_complete(prompt, image_documents, **kwargs) diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/utils.py b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/utils.py index 40aefd749e633..9cd92f6927ab7 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/utils.py +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/llama_index/multi_modal_llms/anthropic/utils.py @@ -1,6 +1,7 @@ import base64 import logging from typing import Any, Dict, List, Optional, Sequence, Tuple +import filetype import httpx from llama_index.core.base.llms.generic_utils import get_from_param_or_env @@ -29,7 +30,18 @@ logger = logging.getLogger(__name__) -def infer_image_mimetype(image_file_path: str) -> str: +def infer_image_mimetype_from_base64(base64_string: str) -> Optional[str]: + # Decode the base64 string + decoded_data = base64.b64decode(base64_string) + + # Use filetype to guess the MIME type + kind = filetype.guess(decoded_data) + + # Return the MIME type if detected, otherwise return None + return kind.mime if kind is not None else None + + +def infer_image_mimetype_from_file_path(image_file_path: str) -> str: # Get the file extension file_extension = image_file_path.split(".")[-1].lower() @@ -64,7 +76,7 @@ def generate_anthropic_multi_modal_chat_message( for image_document in image_documents: image_content: Dict[str, Any] = {} if image_document.image_path and image_document.image_path != "": - mimetype = infer_image_mimetype(image_document.image_path) + mimetype = infer_image_mimetype_from_file_path(image_document.image_path) base64_image = encode_image(image_document.image_path) image_content = { "type": "image", @@ -78,7 +90,9 @@ ( "file_path" in image_document.metadata and image_document.metadata["file_path"] != "" ): - mimetype = infer_image_mimetype(image_document.metadata["file_path"]) + mimetype = infer_image_mimetype_from_file_path( + image_document.metadata["file_path"] + ) base64_image = encode_image(image_document.metadata["file_path"]) image_content = { "type": "image", @@ -89,7 +103,7 @@ }, } elif image_document.image_url and image_document.image_url != "": - mimetype = infer_image_mimetype(image_document.image_url) + mimetype = infer_image_mimetype_from_file_path(image_document.image_url) image_content = { "type": "image", "source": { @@ -100,6 +114,17 @@ ).decode("utf-8"), }, } + elif image_document.image != "": + base64_image = image_document.image + mimetype = infer_image_mimetype_from_base64(base64_image) + image_content = { + "type": "image", + "source": { + "type": "base64", + "media_type": mimetype, + "data": base64_image, + }, + } completion_content.append(image_content) completion_content.append({"type": "text", "text": prompt}) diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/pyproject.toml b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/pyproject.toml index 22bb056489226..3d8f3c95b9ec3 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/pyproject.toml +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-anthropic/pyproject.toml @@
-27,12 +27,13 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-multi-modal-llms-anthropic" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" anthropic = ">=0.26.2, <0.29.0" llama-index-core = "^0.11.0" +filetype = "^1.2.0" [tool.poetry.group.dev.dependencies] ipython = "8.10.0" diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-dashscope/llama_index/multi_modal_llms/dashscope/base.py b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-dashscope/llama_index/multi_modal_llms/dashscope/base.py index a020f64c1208e..dc19ae2ad096e 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-dashscope/llama_index/multi_modal_llms/dashscope/base.py +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-dashscope/llama_index/multi_modal_llms/dashscope/base.py @@ -17,7 +17,7 @@ from llama_index.core.bridge.pydantic import Field from llama_index.core.callbacks import CallbackManager from llama_index.core.multi_modal_llms.base import MultiModalLLM -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode from llama_index.multi_modal_llms.dashscope.utils import ( chat_message_to_dashscope_multi_modal_messages, dashscope_response_to_chat_response, @@ -137,7 +137,7 @@ def _get_default_parameters(self) -> Dict: return params def _get_input_parameters( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> Tuple[ChatMessage, Dict]: parameters = self._get_default_parameters() parameters.update(kwargs) @@ -155,7 +155,7 @@ def _get_input_parameters( return message, parameters def complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: message, parameters = self._get_input_parameters( prompt, image_documents, **kwargs @@ -172,7 +172,7 @@ def complete( return dashscope_response_to_completion_response(response) def stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: message, parameters = self._get_input_parameters( prompt, image_documents, **kwargs @@ -260,12 +260,12 @@ def gen() -> ChatResponseGen: # TODO: use proper async methods async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: return self.complete(prompt, image_documents, **kwargs) async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: raise Exception("Not supported") diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-gemini/llama_index/multi_modal_llms/gemini/base.py b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-gemini/llama_index/multi_modal_llms/gemini/base.py index c1f5966dc1bfb..c4dfab02f5248 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-gemini/llama_index/multi_modal_llms/gemini/base.py +++ 
b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-gemini/llama_index/multi_modal_llms/gemini/base.py @@ -21,7 +21,7 @@ MultiModalLLM, MultiModalLLMMetadata, ) -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode from llama_index.llms.gemini.utils import ( ROLES_FROM_GEMINI, chat_from_gemini_response, @@ -158,14 +158,14 @@ def metadata(self) -> MultiModalLLMMetadata: ) def complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: images = [PIL.Image.open(doc.resolve_image()) for doc in image_documents] result = self._model.generate_content([prompt, *images], **kwargs) return completion_from_gemini_response(result) def stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: images = [PIL.Image.open(doc.resolve_image()) for doc in image_documents] result = self._model.generate_content([prompt, *images], stream=True, **kwargs) @@ -206,14 +206,14 @@ def gen() -> ChatResponseGen: return gen() async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: images = [PIL.Image.open(doc.resolve_image()) for doc in image_documents] result = await self._model.generate_content_async([prompt, *images], **kwargs) return completion_from_gemini_response(result) async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: images = [PIL.Image.open(doc.resolve_image()) for doc in image_documents] ait = await self._model.generate_content_async( diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/llama_index/multi_modal_llms/ollama/base.py b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/llama_index/multi_modal_llms/ollama/base.py index c4e44dca3e769..ca34aeae82d5d 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/llama_index/multi_modal_llms/ollama/base.py +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/llama_index/multi_modal_llms/ollama/base.py @@ -19,7 +19,7 @@ MultiModalLLMMetadata, ) from llama_index.core.multi_modal_llms.generic_utils import image_documents_to_base64 -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode def get_additional_kwargs( @@ -152,7 +152,7 @@ def stream_chat( def complete( self, prompt: str, - image_documents: Sequence[ImageDocument], + image_documents: Sequence[ImageNode], formatted: bool = False, **kwargs: Any, ) -> CompletionResponse: @@ -174,7 +174,7 @@ def complete( def stream_complete( self, prompt: str, - image_documents: Sequence[ImageDocument], + image_documents: Sequence[ImageNode], formatted: bool = False, **kwargs: Any, ) -> CompletionResponseGen: @@ -201,7 +201,7 @@ def stream_complete( ) async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: raise NotImplementedError("Ollama does not support async completion.") @@ -211,7 +211,7 @@ async 
def achat( raise NotImplementedError("Ollama does not support async chat.") async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: raise NotImplementedError("Ollama does not support async streaming completion.") diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/pyproject.toml b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/pyproject.toml index 332466d21c1c3..93a5f36bd2b25 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/pyproject.toml +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-ollama/pyproject.toml @@ -27,11 +27,11 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-multi-modal-llms-ollama" readme = "README.md" -version = "0.2.0" +version = "0.3.0" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -ollama = "^0.1.6" +ollama = ">=0.3.0" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-openai/llama_index/multi_modal_llms/openai/base.py b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-openai/llama_index/multi_modal_llms/openai/base.py index 0b2a7d539c24c..58681dbd2c5b4 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-openai/llama_index/multi_modal_llms/openai/base.py +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-openai/llama_index/multi_modal_llms/openai/base.py @@ -23,7 +23,7 @@ ) from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback from llama_index.core.multi_modal_llms import MultiModalLLM, MultiModalLLMMetadata -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode from llama_index.llms.openai.utils import ( from_openai_message, resolve_openai_credentials, @@ -163,7 +163,7 @@ def _get_multi_modal_chat_messages( self, prompt: str, role: str, - image_documents: Sequence[ImageDocument], + image_documents: Sequence[ImageNode], **kwargs: Any, ) -> List[ChatCompletionMessageParam]: return to_openai_message_dicts( @@ -210,7 +210,7 @@ def _get_response_token_counts(self, raw_response: Any) -> dict: @llm_completion_callback() def _complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -248,7 +248,7 @@ def _chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse: @llm_completion_callback() def _stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -332,12 +332,12 @@ def gen() -> ChatResponseGen: return gen() def complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: return self._complete(prompt, image_documents, **kwargs) def stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: 
Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: return self._stream_complete(prompt, image_documents, **kwargs) @@ -359,7 +359,7 @@ def stream_chat( @llm_completion_callback() async def _acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -378,13 +378,13 @@ async def _acomplete( ) async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: return await self._acomplete(prompt, image_documents, **kwargs) @llm_completion_callback() async def _astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: all_kwargs = self._get_model_kwargs(**kwargs) message_dict = self._get_multi_modal_chat_messages( @@ -488,7 +488,7 @@ async def gen() -> ChatResponseAsyncGen: return gen() async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: return await self._astream_complete(prompt, image_documents, **kwargs) diff --git a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-replicate/llama_index/multi_modal_llms/replicate/base.py b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-replicate/llama_index/multi_modal_llms/replicate/base.py index eab3c7b589549..ec4a23156b8c3 100644 --- a/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-replicate/llama_index/multi_modal_llms/replicate/base.py +++ b/llama-index-integrations/multi_modal_llms/llama-index-multi-modal-llms-replicate/llama_index/multi_modal_llms/replicate/base.py @@ -20,7 +20,7 @@ MultiModalLLM, MultiModalLLMMetadata, ) -from llama_index.core.schema import ImageDocument +from llama_index.core.schema import ImageNode _logger = logging.getLogger(__name__) @@ -122,7 +122,7 @@ def _model_kwargs(self) -> Dict[str, Any]: } def _get_multi_modal_chat_messages( - self, prompt: str, image_document: ImageDocument, **kwargs: Any + self, prompt: str, image_document: ImageNode, **kwargs: Any ) -> Dict[str, Any]: if image_document.image_path: # load local image file and pass file handler to replicate @@ -151,7 +151,7 @@ def _get_multi_modal_chat_messages( ) def complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: response_gen = self.stream_complete(prompt, image_documents, **kwargs) response_list = list(response_gen) @@ -160,7 +160,7 @@ def complete( return final_response def stream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseGen: try: import replicate @@ -220,7 +220,7 @@ def stream_chat( # ===== Async Endpoints ===== async def acomplete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponse: response_gen = 
self.stream_complete(prompt, image_documents, **kwargs) response_list = list(response_gen) @@ -229,7 +229,7 @@ async def acomplete( return final_response async def astream_complete( - self, prompt: str, image_documents: Sequence[ImageDocument], **kwargs: Any + self, prompt: str, image_documents: Sequence[ImageNode], **kwargs: Any ) -> CompletionResponseAsyncGen: try: import replicate diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/.gitignore b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/BUILD b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/Makefile b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/README.md b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/README.md new file mode 100644 index 0000000000000..ea9de868d768a --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/README.md @@ -0,0 +1,55 @@ +# LlamaIndex Node_Parser Integration: Alibabacloud_Aisearch + +## Installation + +``` +pip install llama-index-node-parser-alibabacloud-aisearch +``` + +## Optional Installation + +For automatic parsing of image slices, you can optionally install `llama-index-readers-alibabacloud-aisearch`. + +``` +pip install llama-index-readers-alibabacloud-aisearch +``` + +## Usage + +For further details, please visit [document-split-api-details](https://help.aliyun.com/zh/open-search/search-platform/developer-reference/document-split-api-details). + +You can specify the `endpoint` and `aisearch_api_key` in the constructor, or set the environment variables `AISEARCH_ENDPOINT` and `AISEARCH_API_KEY`. 
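For instance, the credentials can be exported through the environment before the parser is constructed. A minimal sketch (the endpoint and key values are placeholders, not real credentials); the usage example below assumes they are already set:

```python
import os

# Hypothetical values -- substitute the endpoint and API key of your own
# AI Search instance.
os.environ["AISEARCH_ENDPOINT"] = "your-aisearch-endpoint"
os.environ["AISEARCH_API_KEY"] = "your-aisearch-api-key"
```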
+ +```python +from llama_index.node_parser.alibabacloud_aisearch import ( + AlibabaCloudAISearchNodeParser, +) +from llama_index.core import Document + +try: + from llama_index.readers.alibabacloud_aisearch import ( + AlibabaCloudAISearchImageReader, + ) + + image_reader = AlibabaCloudAISearchImageReader( + service_id="ops-image-analyze-vlm-001" + ) +except ImportError: + image_reader = None +node_parser = AlibabaCloudAISearchNodeParser( + chunk_size=1024, image_reader=image_reader +) +nodes = node_parser( + [ + Document(text="content1", mimetype="text/markdown"), + Document( + text="content2 ![IMAGE](https://img.alicdn.com/imgextra/i1/O1CN01WksnF41hlhBFsXDNB_!!6000000004318-0-tps-1000-1400.jpg)", + mimetype="text/markdown", + ), + ], + show_progress=True, +) +for i, node in enumerate(nodes): + print(f"[SPLIT#{i}]:\n{node.get_content()}") + print("-" * 80) +``` diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/BUILD b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/__init__.py b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/__init__.py new file mode 100644 index 0000000000000..810f3a3641206 --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/__init__.py @@ -0,0 +1,6 @@ +from llama_index.node_parser.alibabacloud_aisearch.base import ( + AlibabaCloudAISearchNodeParser, +) + + +__all__ = ["AlibabaCloudAISearchNodeParser"] diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/base.py b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/base.py new file mode 100644 index 0000000000000..c7cb325bbc363 --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/llama_index/node_parser/alibabacloud_aisearch/base.py @@ -0,0 +1,185 @@ +import asyncio +import logging +from typing import Any, List, Optional, Sequence + +from llama_index.core.node_parser.node_utils import build_nodes_from_splits +from llama_index.core.node_parser.interface import NodeParser +from llama_index.core.readers.base import BaseReader +from llama_index.core.schema import BaseNode, TextNode +from llama_index.core.async_utils import run_jobs + +from llama_index.core.bridge.pydantic import Field, PrivateAttr + +from llama_index.core.base.llms.generic_utils import get_from_param_or_env + +try: + from alibabacloud_searchplat20240529.models import ( + GetDocumentSplitRequest, + GetDocumentSplitRequestDocument, + GetDocumentSplitRequestStrategy, + GetDocumentSplitResponse, + ) + from alibabacloud_tea_openapi.models import Config as AISearchConfig + from alibabacloud_searchplat20240529.client import Client + from Tea.exceptions import TeaException +except ImportError: +
raise ImportError( + "Could not import alibabacloud_searchplat20240529 python package. " + "Please install it with `pip install alibabacloud-searchplat20240529`." + ) + +logger = logging.getLogger(__name__) + + +def aretry_decorator(func, wait_seconds: int = 1): + async def wrap(*args, **kwargs): + while True: + try: + return await func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + await asyncio.sleep(wait_seconds) + else: + raise + + return wrap + + +class AlibabaCloudAISearchNodeParser(NodeParser): + """ + For further details, please visit `https://help.aliyun.com/zh/open-search/search-platform/developer-reference/document-split-api-details`. + """ + + _client: Client = PrivateAttr() + _split_strategy: GetDocumentSplitRequestStrategy = PrivateAttr() + + image_reader: Optional[BaseReader] = Field(default=None, exclude=True) + + aisearch_api_key: str = Field(default=None, exclude=True) + endpoint: str = None + + service_id: str = "ops-document-split-001" + workspace_name: str = "default" + chunk_size: int = 300 + need_sentence: bool = False + default_content_encoding: str = "utf8" + default_content_type: str = "text/plain" + num_workers: int = 4 + + def __init__( + self, endpoint: str = None, aisearch_api_key: str = None, **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.aisearch_api_key = get_from_param_or_env( + "aisearch_api_key", aisearch_api_key, "AISEARCH_API_KEY" + ) + self.endpoint = get_from_param_or_env("endpoint", endpoint, "AISEARCH_ENDPOINT") + + config = AISearchConfig( + bearer_token=self.aisearch_api_key, + endpoint=self.endpoint, + protocol="http", + ) + + self._client = Client(config=config) + self._split_strategy = GetDocumentSplitRequestStrategy( + max_chunk_size=self.chunk_size, need_sentence=self.need_sentence + ) + + @classmethod + def class_name(cls) -> str: + """Get class name.""" + return "AlibabaCloudAISearchNodeParser" + + def _parse_nodes( + self, + documents: Sequence[BaseNode], + show_progress: bool = False, + **kwargs: Any, + ) -> List[BaseNode]: + return asyncio.get_event_loop().run_until_complete( + self._aparse_nodes(documents, show_progress, **kwargs) + ) + + async def _aparse_nodes( + self, + documents: Sequence[BaseNode], + show_progress: bool = False, + **kwargs: Any, + ) -> List[BaseNode]: + """Parse document into nodes. 
+ + Args: + documents (Sequence[BaseNode]): documents to parse + """ + jobs = [self._aparse_node(d) for d in documents] + results = await run_jobs( + jobs, + workers=self.num_workers, + desc="Parsing documents into nodes", + show_progress=show_progress, + ) + # return flattened results + return [item for sublist in results for item in sublist] + + @aretry_decorator + async def _aparse_node( + self, + node: BaseNode, + ) -> List[BaseNode]: + content_type = getattr(node, "mimetype", self.default_content_type) + main_type, sub_type = content_type.split("/") + if main_type != "text": + raise ValueError(f"Unsupported content type: {content_type}") + content_encoding = node.metadata.get("encoding", self.default_content_encoding) + document = GetDocumentSplitRequestDocument( + content=node.get_content(), + content_encoding=content_encoding, + content_type=sub_type, + ) + request = GetDocumentSplitRequest( + document=document, strategy=self._split_strategy + ) + + response: GetDocumentSplitResponse = ( + await self._client.get_document_split_async( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + ) + ) + return await self._handle_response(response, node) + + async def _handle_response( + self, response: GetDocumentSplitResponse, node: BaseNode + ) -> List[TextNode]: + chunks = list(response.body.result.chunks) + if response.body.result.sentences: + chunks.extend(response.body.result.sentences) + chunks.extend(await self._handle_rich_texts(response.body.result.rich_texts)) + return build_nodes_from_splits( + [chunk.content for chunk in chunks], node, id_func=self.id_func + ) + + async def _handle_rich_texts(self, rich_texts) -> List[Any]: + chunks = [] + if not rich_texts: + return chunks + chunks = list(rich_texts) + for chunk in chunks: + if chunk.meta.get("type") == "image": + chunk.content = await self._handle_image(chunk.content) + return chunks + + async def _handle_image(self, url: str) -> str: + content = url + if not self.image_reader: + return content + try: + docs = await self.image_reader.aload_data([url]) + content = docs[0].get_content() + except Exception as e: + logger.error(f"Read image {url} error: {e}") + return content diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/pyproject.toml b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/pyproject.toml new file mode 100644 index 0000000000000..7208f8faa57e3 --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.node_parser.alibabacloud_aisearch" + +[tool.llamahub.class_authors] +AlibabaCloudAISearchNodeParser = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["llama-index"] +description = "llama-index node_parser alibabacloud_aisearch integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-node-parser-alibabacloud-aisearch" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +alibabacloud-searchplat20240529 = "^1.1.0" +llama-index-core = "^0.11.0" +
+[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/BUILD b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/__init__.py b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/test_node_parser_alibabacloud_aisearch.py b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/test_node_parser_alibabacloud_aisearch.py new file mode 100644 index 0000000000000..bd73eba95828c --- /dev/null +++ b/llama-index-integrations/node_parser/llama-index-node-parser-alibabacloud-aisearch/tests/test_node_parser_alibabacloud_aisearch.py @@ -0,0 +1,7 @@ +from llama_index.node_parser.alibabacloud_aisearch import AlibabaCloudAISearchNodeParser +from llama_index.core.node_parser.interface import NodeParser + + +def test_class(): + names_of_base_classes = [b.__name__ for b in AlibabaCloudAISearchNodeParser.__mro__] + assert NodeParser.__name__ in names_of_base_classes diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/.gitignore b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/Makefile b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. 
+ sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/README.md b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/README.md new file mode 100644 index 0000000000000..65e9d2c0d50f3 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/README.md @@ -0,0 +1,40 @@ +# LlamaIndex Postprocessor Integration: Alibabacloud_Aisearch_Rerank + +## Installation + +```bash +pip install llama-index-postprocessor-alibabacloud-aisearch-rerank +``` + +## Usage + +For further details, please visit [ranker-api-details](https://help.aliyun.com/zh/open-search/search-platform/developer-reference/ranker-api-details). + +You can specify the `endpoint` and `aisearch_api_key` in the constructor, or set the environment variables `AISEARCH_ENDPOINT` and `AISEARCH_API_KEY`. + +```python +from llama_index.core.data_structs import Node +from llama_index.core.schema import NodeWithScore +from llama_index.postprocessor.alibabacloud_aisearch_rerank import ( + AlibabaCloudAISearchRerank, +) + +nodes = [ + NodeWithScore( + node=Node(id_="1", text=""), + score=0.7, + ), + NodeWithScore( + node=Node(id_="2", text=""), + score=0.8, + ), + NodeWithScore( + node=Node(id_="3", text=""), + score=0.1, + ), +] +reranker = AlibabaCloudAISearchRerank(top_n=2) +new_nodes = reranker.postprocess_nodes(nodes, query_str="") +for node in new_nodes: + print(f"{node.node.text[:20]}\t{node.score}") +``` diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/__init__.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/__init__.py new file mode 100644 index 0000000000000..d3887f174f7fc --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/__init__.py @@ -0,0 +1,6 @@ +from llama_index.postprocessor.alibabacloud_aisearch_rerank.base import ( + AlibabaCloudAISearchRerank, +) + + +__all__ = ["AlibabaCloudAISearchRerank"] diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/base.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/base.py new file mode 100644 index 0000000000000..debe5efb2426c --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/llama_index/postprocessor/alibabacloud_aisearch_rerank/base.py @@ -0,0 +1,155 @@ +import time +from 
typing import Any, List, Optional + +from llama_index.core.bridge.pydantic import Field, PrivateAttr +from llama_index.core.callbacks import CBEventType, EventPayload +from llama_index.core.instrumentation import get_dispatcher +from llama_index.core.instrumentation.events.rerank import ( + ReRankEndEvent, + ReRankStartEvent, +) +from llama_index.core.postprocessor.types import BaseNodePostprocessor +from llama_index.core.schema import MetadataMode, NodeWithScore, QueryBundle + +from llama_index.core.base.llms.generic_utils import get_from_param_or_env + +try: + from alibabacloud_searchplat20240529.models import ( + GetDocumentRankRequest, + GetDocumentRankResponse, + GetDocumentRankResponseBodyResultScores, + ) + from alibabacloud_tea_openapi.models import Config as AISearchConfig + from alibabacloud_searchplat20240529.client import Client + from Tea.exceptions import TeaException +except ImportError: + raise ImportError( + "Could not import alibabacloud_searchplat20240529 python package. " + "Please install it with `pip install alibabacloud-searchplat20240529`." + ) + +dispatcher = get_dispatcher(__name__) + + +def retry_decorator(func, wait_seconds: int = 1): + def wrap(*args, **kwargs): + while True: + try: + return func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + time.sleep(wait_seconds) + else: + raise + + return wrap + + +class AlibabaCloudAISearchRerank(BaseNodePostprocessor): + """ + For further details, please visit `https://help.aliyun.com/zh/open-search/search-platform/developer-reference/ranker-api-details`. + """ + + _client: Client = PrivateAttr() + + aisearch_api_key: str = Field(default=None, exclude=True) + endpoint: str = None + + service_id: str = "ops-bge-reranker-larger" + workspace_name: str = "default" + top_n: int = 3 + batch_size: int = 16 + + def __init__( + self, endpoint: str = None, aisearch_api_key: str = None, **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.aisearch_api_key = get_from_param_or_env( + "aisearch_api_key", aisearch_api_key, "AISEARCH_API_KEY" + ) + self.endpoint = get_from_param_or_env("endpoint", endpoint, "AISEARCH_ENDPOINT") + + config = AISearchConfig( + bearer_token=self.aisearch_api_key, + endpoint=self.endpoint, + protocol="http", + ) + + self._client = Client(config=config) + + @classmethod + def class_name(cls) -> str: + return "AlibabaCloudAISearchRerank" + + @retry_decorator + def _rerank_one_batch( + self, query: str, texts: List[str] + ) -> List[GetDocumentRankResponseBodyResultScores]: + request = GetDocumentRankRequest(docs=texts, query=query) + response: GetDocumentRankResponse = self._client.get_document_rank( + workspace_name=self.workspace_name, + service_id=self.service_id, + request=request, + ) + return response.body.result.scores + + def _rerank( + self, query: str, texts: List[str], top_n: int + ) -> List[GetDocumentRankResponseBodyResultScores]: + scores = [] + for i in range(0, len(texts), self.batch_size): + batch_scores = self._rerank_one_batch(query, texts[i : i + self.batch_size]) + for score in batch_scores: + score.index = i + score.index + scores.extend(batch_scores) + scores.sort(key=lambda x: x.score, reverse=True) + return scores[:top_n] + + def _postprocess_nodes( + self, + nodes: List[NodeWithScore], + query_bundle: Optional[QueryBundle] = None, + ) -> List[NodeWithScore]: + dispatcher.event( + ReRankStartEvent( + query=query_bundle, + nodes=nodes, + top_n=self.top_n, + model_name=self.service_id, + ) + ) + + if query_bundle is None: + raise 
ValueError("Missing query bundle in extra info.") + if len(nodes) == 0: + return [] + + with self.callback_manager.event( + CBEventType.RERANKING, + payload={ + EventPayload.NODES: nodes, + EventPayload.MODEL_NAME: self.service_id, + EventPayload.QUERY_STR: query_bundle.query_str, + EventPayload.TOP_K: self.top_n, + }, + ) as event: + texts = [ + node.node.get_content(metadata_mode=MetadataMode.EMBED) + for node in nodes + ] + results = self._rerank( + query=query_bundle.query_str, + texts=texts, + top_n=self.top_n, + ) + + new_nodes = [] + for result in results: + new_node_with_score = NodeWithScore( + node=nodes[result.index].node, score=result.score + ) + new_nodes.append(new_node_with_score) + event.on_end(payload={EventPayload.NODES: new_nodes}) + + dispatcher.event(ReRankEndEvent(nodes=new_nodes)) + return new_nodes diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/pyproject.toml b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/pyproject.toml new file mode 100644 index 0000000000000..1d315bd1ffc6b --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.postprocessor.alibabacloud_aisearch_rerank" + +[tool.llamahub.class_authors] +AlibabaCloudAISearchRerank = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["llama-index"] +description = "llama-index postprocessor alibabacloud_aisearch_rerank integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-postprocessor-alibabacloud-aisearch-rerank" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +alibabacloud-searchplat20240529 = "^1.1.0" +llama-index-core = "^0.11.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/__init__.py 
b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/test_postprocessor_alibabacloud_aisearch_rerank.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/test_postprocessor_alibabacloud_aisearch_rerank.py new file mode 100644 index 0000000000000..2823670ee3e21 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-alibabacloud-aisearch-rerank/tests/test_postprocessor_alibabacloud_aisearch_rerank.py @@ -0,0 +1,9 @@ +from llama_index.core.postprocessor.types import BaseNodePostprocessor +from llama_index.postprocessor.alibabacloud_aisearch_rerank import ( + AlibabaCloudAISearchRerank, +) + + +def test_class(): + names_of_base_classes = [b.__name__ for b in AlibabaCloudAISearchRerank.__mro__] + assert BaseNodePostprocessor.__name__ in names_of_base_classes diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/README.md b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/README.md index 961f2eceb9a4e..4e16b020eea44 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/README.md +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/README.md @@ -20,7 +20,7 @@ Below is an example on how to use some common functionality surrounding text-gen ## Installation ```shell -pip install --upgrade llama-index llama-index-core llama-index-nvidia-rerank +pip install --upgrade llama-index llama-index-core llama-index-postprocessor-nvidia-rerank ``` ## Setup @@ -68,12 +68,12 @@ rerank = NVIDIARerank(base_url="http://localhost:1976/v1") ## Supported models -Querying `get_available_models` will still give you all of the other models offered by your API credentials. +Querying `available_models` will still give you all of the other models offered by your API credentials. 
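The snippet below shows the hosted case. For a locally hosted NIM, the same property instead queries the microservice's model-listing route; a short sketch, assuming a NIM is serving at the `localhost:1976` endpoint used in the setup above:

```python
from llama_index.postprocessor.nvidia_rerank import NVIDIARerank

# Hypothetical local endpoint; in on-premises mode the model list is fetched
# from the local "/v1/models" route rather than the hosted API catalog.
rerank = NVIDIARerank(base_url="http://localhost:1976/v1")
for model in rerank.available_models:
    print(model.id)
```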
```python from llama_index.postprocessor.nvidia_rerank import NVIDIARerank -NVIDIARerank.get_available_models() +rerank.available_models ``` **To find out more about a specific model, please navigate to the NVIDIA NIM section of ai.nvidia.com [as linked here](https://docs.api.nvidia.com/nim/).** diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/llama_index/postprocessor/nvidia_rerank/base.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/llama_index/postprocessor/nvidia_rerank/base.py index 0641d966e42ba..5d8ff3ecf06b9 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/llama_index/postprocessor/nvidia_rerank/base.py +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/llama_index/postprocessor/nvidia_rerank/base.py @@ -1,6 +1,7 @@ from typing import Any, List, Optional, Generator, Literal -from urllib.parse import urlparse, urlunparse + +from urllib.parse import urlparse, urljoin from llama_index.core.bridge.pydantic import Field, PrivateAttr, BaseModel, ConfigDict from llama_index.core.callbacks import CBEventType, EventPayload from llama_index.core.instrumentation import get_dispatcher @@ -27,6 +28,7 @@ class Model(BaseModel): id: str + base_model: Optional[str] = None class NVIDIARerank(BaseNodePostprocessor): @@ -34,7 +36,6 @@ class NVIDIARerank(BaseNodePostprocessor): model_config = ConfigDict(validate_assignment=True) model: Optional[str] = Field( - default=DEFAULT_MODEL, description="The NVIDIA API Catalog reranker to use.", ) top_n: Optional[int] = Field( @@ -57,6 +58,8 @@ class NVIDIARerank(BaseNodePostprocessor): ) _api_key: str = PrivateAttr("NO_API_KEY_PROVIDED") # TODO: should be SecretStr _mode: str = PrivateAttr("nvidia") + _is_hosted: bool = PrivateAttr(True) + _base_url: str = PrivateAttr(MODEL_ENDPOINT_MAP.get(DEFAULT_MODEL)) _inference_url: Optional[str] = PrivateAttr(None) def _set_api_key(self, nvidia_api_key: str = None, api_key: str = None) -> None: @@ -69,7 +72,7 @@ def _set_api_key(self, nvidia_api_key: str = None, api_key: str = None) -> None: def __init__( self, - model: str = DEFAULT_MODEL, + model: Optional[str] = None, nvidia_api_key: Optional[str] = None, api_key: Optional[str] = None, base_url: Optional[str] = None, @@ -93,8 +96,16 @@ def __init__( API Key: - The recommended way to provide the API key is through the `NVIDIA_API_KEY` environment variable. """ + if not base_url or (base_url in MODEL_ENDPOINT_MAP.values() and not model): + model = model or DEFAULT_MODEL super().__init__(model=model, **kwargs) + base_url = base_url or MODEL_ENDPOINT_MAP.get(DEFAULT_MODEL) + self._is_hosted = base_url in MODEL_ENDPOINT_MAP.values() + + if not self._is_hosted and base_url: + self._base_url = base_url.rstrip("/") + "/" + self._api_key = get_from_param_or_env( "api_key", nvidia_api_key or api_key, @@ -102,10 +113,12 @@ def __init__( "NO_API_KEY_PROVIDED", ) - if base_url: # on-premises mode + if not self._is_hosted: # on-premises mode # in this case we trust the model name and base_url self._inference_url = self._validate_url(base_url) + "/ranking" else: # hosted mode + if not model: + model = MODEL_ENDPOINT_MAP.get(base_url) if model not in MODEL_ENDPOINT_MAP: raise ValueError( f"Model '{model}' not found. 
" @@ -115,6 +128,71 @@ def __init__( raise ValueError("An API key is required for hosted NIM.") self._inference_url = MODEL_ENDPOINT_MAP[model] + if not model: + self.__set_default_model() + + def __set_default_model(self): + """Set default model.""" + if not self._is_hosted: + valid_models = [ + model.id + for model in self.available_models + if not model.base_model or model.base_model == model.id + ] + self.model = next(iter(valid_models), None) + if self.model: + warnings.warn( + f"Default model is set as: {self.model}. \n" + "Set model using model parameter. \n" + "To get available models use available_models property.", + UserWarning, + ) + else: + raise ValueError("No locally hosted model was found.") + else: + self.model = DEFAULT_MODEL + + def _get_models(self) -> List[Model]: + session = requests.Session() + + if self._is_hosted: + _headers = { + "Authorization": f"Bearer {self._api_key}", + "Accept": "application/json", + } + else: + _headers = { + "Accept": "application/json", + } + url = ( + "https://integrate.api.nvidia.com/v1/models" + if self._is_hosted + else urljoin(self._base_url, "models") + ) + response = session.get(url, headers=_headers) + response.raise_for_status() + + assert ( + "data" in response.json() + ), "Response does not contain expected 'data' key" + assert isinstance( + response.json()["data"], list + ), "Response 'data' is not a list" + assert all( + isinstance(result, dict) for result in response.json()["data"] + ), "Response 'data' is not a list of dictionaries" + assert all( + "id" in result for result in response.json()["data"] + ), "Response 'rankings' is not a list of dictionaries with 'id'" + + return [ + Model( + id=model["id"], + base_model=getattr(model, "params", {}).get("root", None), + ) + for model in response.json()["data"] + ] + def _validate_url(self, base_url): """ Base URL Validation. @@ -125,9 +203,7 @@ def _validate_url(self, base_url): expected_format = "Expected format is 'http://host:port'." result = urlparse(base_url) if not (result.scheme and result.netloc): - raise ValueError( - f"Invalid base_url, Expected format is 'http://host:port': {base_url}" - ) + raise ValueError(f"Invalid base_url, {expected_format}") if result.path: normalized_path = result.path.strip("/") if normalized_path == "v1": @@ -135,15 +211,18 @@ def _validate_url(self, base_url): elif normalized_path == "v1/rankings": warnings.warn(f"{expected_format} Rest is Ignored.") else: - raise ValueError(f"Base URL path is not recognized. 
{expected_format}") - return urlunparse((result.scheme, result.netloc, "v1", "", "", "")) + raise ValueError(f"Invalid base_url, {expected_format}") + return base_url @property def available_models(self) -> List[Model]: """Get available models.""" # all available models are in the map ids = MODEL_ENDPOINT_MAP.keys() - return [Model(id=id) for id in ids] + if not self._is_hosted: + return self._get_models() + else: + return [Model(id=id) for id in ids] @classmethod def class_name(cls) -> str: diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/pyproject.toml b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/pyproject.toml index dc17ef8093189..d78302aeef994 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/pyproject.toml +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/pyproject.toml @@ -57,6 +57,7 @@ types-requests = "2.28.11.8" # TODO: unpin when mypy>0.991 types-setuptools = "67.1.0.0" [tool.poetry.group.test_integration.dependencies] +pytest-httpx = "*" requests-mock = "^1.12.1" [tool.pytest.ini_options] diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_api_key.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_api_key.py index cbb99104e084e..09ca0a2b0bd0b 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_api_key.py +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_api_key.py @@ -6,6 +6,19 @@ from llama_index.core.schema import NodeWithScore, Document from typing import Any +from requests_mock import Mocker + + +@pytest.fixture() +def mock_local_models(requests_mock: Mocker) -> None: + requests_mock.get( + "https://test_url/v1/models", + json={ + "data": [ + {"id": "model1"}, + ] + }, + ) def get_api_key(instance: Any) -> str: @@ -18,6 +31,7 @@ def test_create_default_url_without_api_key(masked_env_var: str) -> None: assert "API key is required" in str(e.value) +@pytest.mark.usefixtures("mock_local_models") def test_create_unknown_url_without_api_key(masked_env_var: str) -> None: Interface(base_url="https://test_url/v1") diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_available_models.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_available_models.py index 698118baa6cc9..fa2e9acd1e92c 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_available_models.py +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_available_models.py @@ -1,6 +1,19 @@ import pytest from llama_index.postprocessor.nvidia_rerank import NVIDIARerank +from requests_mock import Mocker + + +@pytest.fixture(autouse=True) +def mock_local_models(requests_mock: Mocker) -> None: + requests_mock.get( + "https://test_url/v1/models", + json={ + "data": [ + {"id": "model1"}, + ] + }, + ) @pytest.mark.integration() diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_base_url.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_base_url.py index da05b7bc5e0e2..6efd5de98db79 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_base_url.py +++ 
b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_base_url.py @@ -47,7 +47,7 @@ def test_base_url_invalid_not_hosted( @pytest.mark.parametrize( "base_url", [ - "http://0.0.0.0:8888/v1/rankings", + "http://0.0.0.0:8888/v1", ], ) def test_base_url_valid_not_hosted(base_url: str, mock_v1_local_models2: None) -> None: diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_postprocessor_nvidia_rerank.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_postprocessor_nvidia_rerank.py index 5577fe61dee30..b35b32dedc0ef 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_postprocessor_nvidia_rerank.py +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-nvidia-rerank/tests/test_postprocessor_nvidia_rerank.py @@ -7,6 +7,27 @@ import faker +from requests_mock import Mocker + + +@pytest.fixture() +def known_unknown() -> str: + return "mock-model" + + +@pytest.fixture() +def mock_local_models(requests_mock: Mocker, known_unknown) -> None: + requests_mock.get( + "http://localhost:8000/v1/models", + json={ + "data": [ + { + "id": known_unknown, + }, + ] + }, + ) + @pytest.fixture() def text() -> str: @@ -170,3 +191,22 @@ def test_rerank_batching( assert all( result[i].score >= result[i + 1].score for i in range(len(result) - 1) ), "results are not sorted" + + +def test_default_known(mock_local_models, known_unknown: str) -> None: + """ + Test that the default model is inferred from a locally hosted endpoint. + """ + # check if default model is getting set + with pytest.warns(UserWarning): + x = NVIDIARerank(base_url="http://localhost:8000/v1") + assert x.model == known_unknown + + +def test_default_lora() -> None: + """ + Test that an explicitly provided (e.g., LoRA) model name is accepted as-is. + """ + # an explicitly passed model name is trusted without a lookup + x = NVIDIARerank(base_url="http://localhost:8000/v1", model="lora1") + assert x.model == "lora1" diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/README.md b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/README.md index 0717b519d63ce..837b41ad6be83 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/README.md +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/README.md @@ -1,23 +1,40 @@ # LlamaIndex Postprocessor Integration: Rankllm-Rerank -RankLLM offers a suite of listwise rerankers, albeit with focus on open source LLMs finetuned for the task. Currently, RankLLM supports 2 of these models: RankZephyr (`model="zephyr"`) and RankVicuna (`model="vicuna"`). RankLLM also support RankGPT usage (`model="gpt"`, `gpt_model="VALID_OPENAI_MODEL_NAME"`). +RankLLM offers a suite of rerankers, albeit with a focus on open-source LLMs finetuned for the task. To use a model offered by the RankLLM suite, pass the desired model's **Hugging Face model path**, found at [Castorini's Hugging Face](https://huggingface.co/castorini). -Please `pip install llama-index-postprocessor-rankllm-rerank` to install RankLLM rerank package. +E.g., to access `LiT5-Distill-base`, pass [`castorini/LiT5-Distill-base`](https://huggingface.co/castorini/LiT5-Distill-base) as the model name. -Parameters: +For more information about RankLLM and the models supported, visit **[rankllm.ai](http://rankllm.ai)**. 
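+For example, once the integration package is installed (installation command below), choosing a reranker is just a matter of passing its Hugging Face path to the constructor. A minimal sketch (the model choice here is illustrative):
+
+```python
+from llama_index.postprocessor.rankllm_rerank import RankLLMRerank
+
+# Pass the Hugging Face model path directly; "castorini/LiT5-Distill-base"
+# is one of the listwise rerankers hosted under the Castorini organization.
+reranker = RankLLMRerank(model="castorini/LiT5-Distill-base", top_n=5)
+```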
Please `pip install llama-index-postprocessor-rankllm-rerank` to install the RankLLM rerank package. -- top_n: Top N nodes to return from reranking. -- model: Reranker model name/class (`zephyr`, `vicuna`, or `gpt`). -- with_retrieval[Optional]: Perform retrieval before reranking with `Pyserini`. -- step_size[Optional]: Step size of sliding window for reranking large corpuses. -- gpt_model[Optional]: OpenAI model to use (e.g., `gpt-3.5-turbo`) if `model="gpt"` +#### Parameters: + +- `model`: Reranker model name. +- `top_n`: Top N nodes to return from reranking. +- `window_size`: Reranking window size. Applicable only for listwise and pairwise models. +- `batch_size`: Reranking batch size. Applicable only for pointwise models. + +#### Model Coverage + +Below are all the rerankers supported with the model name to be passed as an argument to the constructor. Some models have convenience names for ease of use: + +**Listwise**: + +- **RankZephyr**. model=`rank_zephyr` or `castorini/rank_zephyr_7b_v1_full` +- **RankVicuna**. model=`rank_vicuna` or `castorini/rank_vicuna_7b_v1` +- **RankGPT**. Takes in a _valid_ gpt model. e.g., `gpt-3.5-turbo`, `gpt-4`, `gpt-3` +- **LiT5 Distill**. model=`castorini/LiT5-Distill-base` +- **LiT5 Score**. model=`castorini/LiT5-Score-base` + +**Pointwise**: + +- **MonoT5**. model=`monot5` ### 💻 Example Usage ``` pip install llama-index-core pip install llama-index-llms-openai -pip install llama-index-postprocessor-rankllm-rerank +pip install rank-llm ``` First, build a vector store index with [llama-index](https://pypi.org/project/llama-index/). @@ -28,7 +45,7 @@ index = VectorStoreIndex.from_documents( ) ``` -To set up the retriever and reranker: +To set up the _retriever_ and _reranker_: ``` query_bundle = QueryBundle(query_str) @@ -41,15 +58,12 @@ retriever = VectorIndexRetriever( +from llama_index.postprocessor.rankllm_rerank import RankLLMRerank + # configure reranker reranker = RankLLMRerank( + model=model_name, top_n=reranker_top_n, - model=model, - with_retrieval=with_retrieval, - step_size=step_size, - gpt_model=gpt_model, ) ``` -To run retrieval+reranking: +To run _retrieval+reranking_: ``` # retrieve nodes @@ -65,8 +79,6 @@ reranked_nodes = reranker.postprocess_nodes( Currently, RankLLM rerankers require `CUDA` and the `rank-llm` package to be installed (`pip install rank-llm`). The built-in retriever, which uses [Pyserini](https://github.com/castorini/pyserini), requires `JDK11`, `PyTorch`, and `Faiss`. 
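+Pulling the snippets above together, a minimal end-to-end sketch (assuming a CUDA-capable environment with `rank-llm` installed, and a hypothetical pre-loaded `documents` list):
+
+```python
+from llama_index.core import QueryBundle, VectorStoreIndex
+from llama_index.core.retrievers import VectorIndexRetriever
+from llama_index.postprocessor.rankllm_rerank import RankLLMRerank
+
+# `documents` is assumed to be a list of Document objects loaded elsewhere.
+index = VectorStoreIndex.from_documents(documents)
+retriever = VectorIndexRetriever(index=index, similarity_top_k=10)
+
+query_bundle = QueryBundle("What did the author do growing up?")
+retrieved_nodes = retriever.retrieve(query_bundle)
+
+# Rerank the retrieved nodes with RankZephyr via its convenience name.
+reranker = RankLLMRerank(model="rank_zephyr", top_n=3)
+reranked_nodes = reranker.postprocess_nodes(retrieved_nodes, query_bundle)
+```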
-### castorini/rank_llm +### `castorini/rank_llm` -Repository for prompt-decoding using LLMs (`GPT3.5`, `GPT4`, `Vicuna`, and `Zephyr`)\ -Website: [http://rankllm.ai](http://rankllm.ai)\ -Stars: 193 +Repository for prompt-decoding using LLMs: **[http://rankllm.ai](http://rankllm.ai)** diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/base.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/base.py index 74a136ac96227..774101411fd2e 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/base.py +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/base.py @@ -1,5 +1,4 @@ from typing import Any, List, Optional -from enum import Enum from llama_index.core.bridge.pydantic import Field, PrivateAttr from llama_index.core.instrumentation import get_dispatcher @@ -12,76 +11,63 @@ dispatcher = get_dispatcher(__name__) +try: + from rank_llm.rerank.reranker import Reranker + from rank_llm.data import Request, Query, Candidate +except ImportError: + raise ImportError("RankLLM requires `pip install rank-llm`") -class RankLLMRerank(BaseNodePostprocessor): - """RankLLM-based reranker.""" - top_n: int = Field(default=5, description="Top N nodes to return from reranking.") - model: str = Field(default="zephyr", description="Reranker model name.") - with_retrieval: bool = Field( - default=False, description="Perform retrieval before reranking." +class RankLLMRerank(BaseNodePostprocessor): + """ + RankLLM reranking suite. This class allows access to several reranking models supported by RankLLM. To use a model offered by the RankLLM suite, pass the desired model's Hugging Face path, found at https://huggingface.co/castorini. E.g., to access LiT5-Distill-base, pass 'castorini/LiT5-Distill-base' as the model name (https://huggingface.co/castorini/LiT5-Distill-base). + + Below are all the rerankers supported with the model name to be passed as an argument to the constructor. Some models have convenience names for ease of use: + Listwise: + - RankZephyr. model='rank_zephyr' or 'castorini/rank_zephyr_7b_v1_full' + - RankVicuna. model='rank_vicuna' or 'castorini/rank_vicuna_7b_v1' + - RankGPT. Takes in a valid gpt model. e.g., 'gpt-3.5-turbo', 'gpt-4', 'gpt-3' + - LiT5 Distill. model='castorini/LiT5-Distill-base' + - LiT5 Score. model='castorini/LiT5-Score-base' + Pointwise: + - MonoT5. model='monot5' + + """ + + model: str = Field(description="Model name.") + top_n: Optional[int] = Field( + description="Number of nodes to return sorted by reranking score." + ) + window_size: Optional[int] = Field( + description="Reranking window size. Applicable only for listwise and pairwise models." ) - step_size: int = Field( - default=10, description="Step size for moving sliding window." + batch_size: Optional[int] = Field( + description="Reranking batch size. Applicable only for pointwise models." 
) - gpt_model: str = Field(default="gpt-3.5-turbo", description="OpenAI model name.") + _model: Any = PrivateAttr() - _result: Any = PrivateAttr() - _retriever: Any = PrivateAttr() def __init__( self, - model, - top_n: int = 10, - with_retrieval: Optional[bool] = False, - step_size: Optional[int] = 10, - gpt_model: Optional[str] = "gpt-3.5-turbo", + model: str, + top_n: Optional[int] = None, + window_size: Optional[int] = None, + batch_size: Optional[int] = None, ): - try: - model_enum = ModelType(model.lower()) - except ValueError: - raise ValueError( - "Unsupported model type. Please use 'vicuna', 'zephyr', or 'gpt'." - ) - - from rank_llm.result import Result - super().__init__( model=model, top_n=top_n, - with_retrieval=with_retrieval, - step_size=step_size, - gpt_model=gpt_model, + window_size=window_size, + batch_size=batch_size, ) - self._result = Result - - if model_enum == ModelType.VICUNA: - from rank_llm.rerank.vicuna_reranker import VicunaReranker - - self._model = VicunaReranker() - elif model_enum == ModelType.ZEPHYR: - from rank_llm.rerank.zephyr_reranker import ZephyrReranker - - self._model = ZephyrReranker() - elif model_enum == ModelType.GPT: - from rank_llm.rerank.rank_gpt import SafeOpenai - from rank_llm.rerank.reranker import Reranker - from llama_index.llms.openai import OpenAI - - llm = OpenAI( - model=gpt_model, - temperature=0.0, - ) - - llm.metadata - - agent = SafeOpenai(model=gpt_model, context_size=4096, keys=llm.api_key) - self._model = Reranker(agent) - if with_retrieval: - from rank_llm.retrieve.retriever import Retriever - - self._retriever = Retriever + self._model = Reranker.create_agent( + model.lower(), + default_agent=None, + interactive=False, + window_size=window_size, + batch_size=batch_size, + ) @classmethod def class_name(cls) -> str: @@ -106,56 +92,46 @@ def _postprocess_nodes( for node in nodes ] - if self.with_retrieval: - hits = [ - { - "content": doc[0], - "qid": 1, - "docid": str(index), - "rank": index, - "score": doc[1], - } - for index, doc in enumerate(docs) - ] - retrieved_results = self._retriever.from_inline_hits( - query=query_bundle.query_str, hits=hits + request: List[Request] = [ + Request( + query=Query( + text=query_bundle.query_str, + qid=1, + ), + candidates=[ + Candidate( + docid=index, + score=doc[1], + doc={ + "body": doc[0], + "headings": "", + "title": "", + "url": "", + }, + ) + for index, doc in enumerate(docs) + ], ) - else: - retrieved_results = [ - self._result( - query=query_bundle.query_str, - hits=[ - { - "content": doc[0], - "qid": 1, - "docid": str(index), - "rank": index, - "score": doc[1], - } - for index, doc in enumerate(docs) - ], - ) - ] - - permutation = self._model.rerank( - retrieved_results=retrieved_results, - rank_end=len(docs), - window_size=min(20, len(docs)), - step=self.step_size, + ] + + # scores are maintained the same as generated from the retriever + permutation = self._model.rerank_batch( + request, + rank_end=len(request[0].candidates), + rank_start=0, + shuffle_candidates=False, + logging=False, + top_k_retrieve=len(request[0].candidates), ) new_nodes: List[NodeWithScore] = [] - for hit in permutation[0].hits: - idx: int = int(hit["docid"]) - new_nodes.append( - NodeWithScore(node=nodes[idx].node, score=nodes[idx].score) - ) - - dispatcher.event(ReRankEndEvent(nodes=new_nodes[: self.top_n])) - return new_nodes[: self.top_n] + for candidate in permutation[0].candidates: + id: int = int(candidate.docid) + new_nodes.append(NodeWithScore(node=nodes[id].node, score=nodes[id].score)) - -class 
ModelType(Enum): - VICUNA = "vicuna" - ZEPHYR = "zephyr" - GPT = "gpt" + if self.top_n is None: + dispatcher.event(ReRankEndEvent(nodes=new_nodes)) + return new_nodes + else: + dispatcher.event(ReRankEndEvent(nodes=new_nodes[: self.top_n])) + return new_nodes[: self.top_n] diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/test.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/test.py new file mode 100644 index 0000000000000..a327e6e40fdcb --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/llama_index/postprocessor/rankllm_rerank/test.py @@ -0,0 +1,5 @@ +from rank_llm.rerank.reranker import Reranker + +Reranker.create_agent( + model_path="rank_zephyr", default_agent=None, interactive=False, vllm_batched=True +) diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/pyproject.toml b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/pyproject.toml index 9563817325aa7..8bbca2b0d6c36 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/pyproject.toml +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/pyproject.toml @@ -24,17 +24,18 @@ ignore_missing_imports = true python_version = "3.8" [tool.poetry] -authors = ["Your Name "] +authors = ["Ryan Nguyen <ryan.nguyen@uwaterloo.ca>"] description = "llama-index postprocessor rankllm-rerank integration" exclude = ["**/BUILD"] license = "MIT" name = "llama-index-postprocessor-rankllm-rerank" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.0" +version = "0.3.0" [tool.poetry.dependencies] -python = ">=3.8.1,<4.0" +python = ">=3.10,<4.0" +rank_llm = ">=0.20.2" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/tests/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/tests/BUILD index dabf212d7e716..db53f45e8d68e 100644 --- a/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/tests/BUILD +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-rankllm-rerank/tests/BUILD @@ -1 +1,3 @@ -python_tests() +python_tests( + interpreter_constraints=["==3.10.*"] +) diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/.gitignore b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/Makefile b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. 
+ sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/README.md b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/README.md new file mode 100644 index 0000000000000..fb3aae94f1fd4 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/README.md @@ -0,0 +1,40 @@ +# LlamaIndex Postprocessor Integration: Xinference Rerank + +Xorbits Inference (Xinference) is an open-source platform to streamline the operation and integration of a wide array of AI models. + +You can find a list of Xinference's built-in rerank models in its documentation: [Rerank Models](https://inference.readthedocs.io/en/latest/models/builtin/rerank/index.html) + +To learn more about Xinference in general, visit https://inference.readthedocs.io/en/stable/models/model_abilities/rerank.html + +## Installation + +```shell +pip install llama-index-postprocessor-xinference-rerank +``` + +## Usage + +**Parameters Description:** + +- `model`: The Xinference model uid, not the model name; sometimes they are the same (e.g., `bge-reranker-base`). +- `base_url`: The base URL of the Xinference server (e.g., `http://localhost:9997`). +- `top_n`: Top N nodes to return from the reranker (default 5). + +**Nodes Rerank Example** + +```python +from llama_index.postprocessor.xinference_rerank import XinferenceRerank + +xi_model_uid = "xinference model uid" +xi_base_url = "xinference base url" + +xi_rerank = XinferenceRerank( + top_n=5, + model=xi_model_uid, + base_url=xi_base_url, +) + + +def test_rerank_nodes(nodes, query_str): + response = xi_rerank.postprocess_nodes(nodes, query_str) +``` diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/__init__.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/__init__.py new file mode 100644 index 0000000000000..ea183ba6d2a39 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/__init__.py @@ -0,0 +1,3 @@ +from llama_index.postprocessor.xinference_rerank.base import XinferenceRerank + +__all__ = ["XinferenceRerank"] diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/base.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/base.py new file mode 100644 index 0000000000000..cef36c4712c1a --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/llama_index/postprocessor/xinference_rerank/base.py @@ -0,0 +1,91 @@ +import requests +from typing import List, Optional +from llama_index.core.bridge.pydantic import Field +from llama_index.core.callbacks 
import CBEventType, EventPayload +from llama_index.core.instrumentation import get_dispatcher +from llama_index.core.instrumentation.events.rerank import ( + ReRankEndEvent, + ReRankStartEvent, +) +from llama_index.core.postprocessor.types import BaseNodePostprocessor +from llama_index.core.schema import NodeWithScore, QueryBundle, MetadataMode + +dispatcher = get_dispatcher(__name__) + + +class XinferenceRerank(BaseNodePostprocessor): + """Class for Xinference Rerank.""" + + top_n: int = Field( + default=5, + description="The number of nodes to return.", + ) + model: str = Field( + default="bge-reranker-base", + description="The Xinference model uid to use.", + ) + base_url: str = Field( + default="http://localhost:9997", + description="The Xinference base url to use.", + ) + + @classmethod + def class_name(cls) -> str: + return "XinferenceRerank" + + def get_query_str(self, query): + return query.query_str if isinstance(query, QueryBundle) else query + + def _postprocess_nodes( + self, + nodes: List[NodeWithScore], + query_bundle: Optional[QueryBundle] = None, + ) -> List[NodeWithScore]: + dispatcher.event( + ReRankStartEvent( + query=query_bundle, + nodes=nodes, + top_n=self.top_n, + model_name=self.model, + ) + ) + if query_bundle is None: + raise ValueError("Missing query bundle.") + if len(nodes) == 0: + return [] + with self.callback_manager.event( + CBEventType.RERANKING, + payload={ + EventPayload.NODES: nodes, + EventPayload.MODEL_NAME: self.model, + EventPayload.QUERY_STR: self.get_query_str(query_bundle), + EventPayload.TOP_K: self.top_n, + }, + ) as event: + headers = {"Content-Type": "application/json"} + json_data = { + "model": self.model, + "query": self.get_query_str(query_bundle), + "documents": [ + node.node.get_content(metadata_mode=MetadataMode.EMBED) + for node in nodes + ], + } + response = requests.post( + url=f"{self.base_url}/v1/rerank", headers=headers, json=json_data + ) + response.encoding = "utf-8" + if response.status_code != 200: + raise Exception( + f"Xinference call failed with status code {response.status_code}." 
+ f"Details: {response.text}" + ) + rerank_nodes = [ + NodeWithScore( + node=nodes[result["index"]].node, score=result["relevance_score"] + ) + for result in response.json()["results"][: self.top_n] + ] + event.on_end(payload={EventPayload.NODES: rerank_nodes}) + dispatcher.event(ReRankEndEvent(nodes=rerank_nodes)) + return rerank_nodes diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/pyproject.toml b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/pyproject.toml new file mode 100644 index 0000000000000..cc5a9ca835e74 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/pyproject.toml @@ -0,0 +1,62 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.postprocessor.xinference_rerank" + +[tool.llamahub.class_authors] +XinferenceRerank = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["Your Name "] +description = "llama-index postprocessor xinference rerank integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-postprocessor-xinference-rerank" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +llama-index-core = "^0.11.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/BUILD b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/__init__.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/test_postprocessor_xinference_rerank.py b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/test_postprocessor_xinference_rerank.py new file mode 100644 index 0000000000000..b5d1ff0553489 --- /dev/null +++ b/llama-index-integrations/postprocessor/llama-index-postprocessor-xinference-rerank/tests/test_postprocessor_xinference_rerank.py @@ -0,0 +1,7 @@ +from llama_index.core.postprocessor.types import BaseNodePostprocessor +from 
llama_index.postprocessor.xinference_rerank import XinferenceRerank + + +def test_class(): + names_of_base_classes = [b.__name__ for b in XinferenceRerank.__mro__] + assert BaseNodePostprocessor.__name__ in names_of_base_classes diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/.gitignore b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/BUILD b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/Makefile b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/README.md b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/README.md new file mode 100644 index 0000000000000..b48579e3d2d9f --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/README.md @@ -0,0 +1,61 @@ +# LlamaIndex Readers Integration: Alibabacloud_Aisearch + +## Installation + +``` +pip install llama-index-readers-alibabacloud-aisearch +``` + +## Usage + +Supported file types: ppt/pptx, doc/docx, pdf, images and so on. +For further details, please visit: + +- [document-analyze-api-details](https://help.aliyun.com/zh/open-search/search-platform/developer-reference/api-details) +- [image-analyze-api-details](https://help.aliyun.com/zh/open-search/search-platform/developer-reference/opensearch-api-details) + +You can specify the `endpoint` and `aisearch_api_key` in the constructor, or set the environment variables `AISEARCH_ENDPOINT` and `AISEARCH_API_KEY`. 
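+As a quick illustration, either style of credential configuration looks like this (a minimal sketch; the endpoint and key values are placeholders):
+
+```python
+import os
+
+from llama_index.readers.alibabacloud_aisearch import (
+    AlibabaCloudAISearchDocumentReader,
+)
+
+# Option 1: rely on environment variables read at construction time.
+os.environ["AISEARCH_ENDPOINT"] = "your-endpoint"
+os.environ["AISEARCH_API_KEY"] = "your-api-key"
+reader = AlibabaCloudAISearchDocumentReader()
+
+# Option 2: pass credentials explicitly; constructor arguments take
+# precedence over the environment variables.
+reader = AlibabaCloudAISearchDocumentReader(
+    endpoint="your-endpoint", aisearch_api_key="your-api-key"
+)
+```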
+ +### Read local files + +```python +from llama_index.readers.alibabacloud_aisearch import ( + AlibabaCloudAISearchDocumentReader, + AlibabaCloudAISearchImageReader, +) +from llama_index.core import SimpleDirectoryReader + +document_reader = AlibabaCloudAISearchDocumentReader() +image_reader = AlibabaCloudAISearchImageReader() + +file_extractor = {} +for suffix in (".pdf", ".docx", ".doc", ".ppt", ".pptx"): + file_extractor[suffix] = document_reader +for suffix in (".jpg", ".jpeg", ".png", ".bmp", ".tiff"): + file_extractor[suffix] = image_reader + +documents = SimpleDirectoryReader( + "./data", file_extractor=file_extractor +).load_data(show_progress=True) +print(documents) +``` + +### Read remote files + +```python +from llama_index.readers.alibabacloud_aisearch import ( + AlibabaCloudAISearchImageReader, +) + +image_reader = AlibabaCloudAISearchImageReader( + service_id="ops-image-analyze-ocr-001" +) +image_urls = [ + "https://img.alicdn.com/imgextra/i1/O1CN01WksnF41hlhBFsXDNB_!!6000000004318-0-tps-1000-1400.jpg", +] + +# The file_type is automatically determined based on the file extension. +# If it cannot be identified, manual specification of the file_type is required. +documents = image_reader.load_data(file_path=image_urls, file_type="jpg") +print(documents) +``` diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/BUILD b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/__init__.py b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/__init__.py new file mode 100644 index 0000000000000..b5de4e4a16907 --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/__init__.py @@ -0,0 +1,7 @@ +from llama_index.readers.alibabacloud_aisearch.base import ( + AlibabaCloudAISearchDocumentReader, + AlibabaCloudAISearchImageReader, +) + + +__all__ = ["AlibabaCloudAISearchDocumentReader", "AlibabaCloudAISearchImageReader"] diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/base.py b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/base.py new file mode 100644 index 0000000000000..1f56cb83c23f6 --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/llama_index/readers/alibabacloud_aisearch/base.py @@ -0,0 +1,301 @@ +import base64 +import os +import asyncio +from pathlib import Path +import re +import time +from typing import Any, List, Union + +from llama_index.core.async_utils import run_jobs +from llama_index.core.bridge.pydantic import Field, PrivateAttr +from llama_index.core.readers.base import BasePydanticReader +from llama_index.core.schema import Document + +from llama_index.core.base.llms.generic_utils import get_from_param_or_env + +try: + from alibabacloud_searchplat20240529.models import ( + CreateDocumentAnalyzeTaskRequestDocument, + 
CreateDocumentAnalyzeTaskRequest, + CreateDocumentAnalyzeTaskRequestOutput, + CreateDocumentAnalyzeTaskResponse, + GetDocumentAnalyzeTaskStatusRequest, + GetDocumentAnalyzeTaskStatusResponse, + CreateImageAnalyzeTaskRequestDocument, + CreateImageAnalyzeTaskRequest, + CreateImageAnalyzeTaskResponse, + GetImageAnalyzeTaskStatusRequest, + GetImageAnalyzeTaskStatusResponse, + ) + from alibabacloud_tea_openapi.models import Config as AISearchConfig + from alibabacloud_searchplat20240529.client import Client + from Tea.exceptions import TeaException +except ImportError: + raise ImportError( + "Could not import alibabacloud_searchplat20240529 python package. " + "Please install it with `pip install alibabacloud-searchplat20240529`." + ) + +FilePath = Union[str, Path] + + +def retry_decorator(func, wait_seconds: int = 1): + def wrap(*args, **kwargs): + while True: + try: + return func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + time.sleep(wait_seconds) + else: + raise + + return wrap + + +def aretry_decorator(func, wait_seconds: int = 1): + async def wrap(*args, **kwargs): + while True: + try: + return await func(*args, **kwargs) + except TeaException as e: + if e.code == "Throttling.RateQuota": + await asyncio.sleep(wait_seconds) + else: + raise + + return wrap + + +class AlibabaCloudAISearchDocumentReader(BasePydanticReader): + """ + Supported file types include PPT/PPTX, DOC/DOCX, PDF, and more. + For further details, please visit `https://help.aliyun.com/zh/open-search/search-platform/developer-reference/api-details`. + """ + + _client: Client = PrivateAttr() + + aisearch_api_key: str = Field(default=None, exclude=True) + endpoint: str = None + + service_id: str = "ops-document-analyze-001" + workspace_name: str = "default" + + check_interval: int = 3 + num_workers: int = 4 + show_progress: bool = False + + def __init__( + self, endpoint: str = None, aisearch_api_key: str = None, **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.aisearch_api_key = get_from_param_or_env( + "aisearch_api_key", aisearch_api_key, "AISEARCH_API_KEY" + ) + self.endpoint = get_from_param_or_env("endpoint", endpoint, "AISEARCH_ENDPOINT") + + config = AISearchConfig( + bearer_token=self.aisearch_api_key, + endpoint=self.endpoint, + protocol="http", + ) + + self._client = Client(config=config) + + # upload a document and get back a task_id + @aretry_decorator + async def _create_task( + self, + file_path: str, + file_type: str, + **load_kwargs: Any, + ) -> str: + if file_path.startswith("http"): + file_name = os.path.basename(file_path.split("?")[0].split("#")[0]) + if not file_type: + file_type = os.path.splitext(file_name)[1][1:] + document = CreateDocumentAnalyzeTaskRequestDocument( + url=file_path, + file_name=file_name, + file_type=file_type, + ) + else: + file_name = os.path.basename(file_path) + if not file_type: + file_type = os.path.splitext(file_name)[1][1:] + document = CreateDocumentAnalyzeTaskRequestDocument( + content=base64.b64encode(open(file_path, "rb").read()).decode(), + file_name=file_name, + file_type=file_type, + ) + if not file_type: + raise ValueError( + "The file_type cannot be determined based on the file extension. Please specify it manually." 
+ ) + output = CreateDocumentAnalyzeTaskRequestOutput( + image_storage=load_kwargs.get("image_storage", "url") + ) + request = CreateDocumentAnalyzeTaskRequest(document=document, output=output) + response: CreateDocumentAnalyzeTaskResponse = ( + await self._client.create_document_analyze_task_async( + self.workspace_name, self.service_id, request + ) + ) + return response.body.result.task_id + + async def _get_task_result(self, task_id: str) -> Document: + request = GetDocumentAnalyzeTaskStatusRequest(task_id=task_id) + while True: + response: GetDocumentAnalyzeTaskStatusResponse = ( + await self._client.get_document_analyze_task_status_async( + self.workspace_name, self.service_id, request + ) + ) + status = response.body.result.status + if status == "PENDING": + await asyncio.sleep(self.check_interval) + elif status == "SUCCESS": + data = response.body.result.data + return Document( + text=data.content, + mimetype=f"text/{data.content_type}", + ) + else: + raise RuntimeError( + f"Failed to parse the file, error: {response.body.result.error}, task id: {task_id}" + ) + + async def _aload_data( + self, + file_path: str, + file_type: str = None, + **load_kwargs: Any, + ) -> Document: + """Load data from the input path.""" + task_id = await self._create_task(file_path, file_type, **load_kwargs) + return await self._get_task_result(task_id) + + async def aload_data( + self, + file_path: Union[List[FilePath], FilePath], + file_type: Union[List[FilePath], FilePath] = None, + **load_kwargs: Any, + ) -> List[Document]: + """Load data from the input path.""" + if isinstance(file_path, (str, Path)): + doc = await self._aload_data(str(file_path), file_type, **load_kwargs) + return [doc] + elif isinstance(file_path, list): + if isinstance(file_type, list) and len(file_type) != len(file_path): + raise ValueError( + "The length of file_type must be the same as file_path." + ) + else: + file_type = [file_type] * len(file_path) + jobs = [ + self._aload_data( + str(f), + t, + **load_kwargs, + ) + for f, t in zip(file_path, file_type) + ] + return await run_jobs( + jobs, + workers=self.num_workers, + desc="Parsing files", + show_progress=self.show_progress, + ) + else: + raise ValueError( + "The input file_path must be a string or a list of strings." + ) + + def load_data( + self, + file_path: Union[List[FilePath], FilePath], + **load_kwargs: Any, + ) -> List[Document]: + """Load data from the input path.""" + return asyncio.get_event_loop().run_until_complete( + self.aload_data(file_path, **load_kwargs) + ) + + +class AlibabaCloudAISearchImageReader(AlibabaCloudAISearchDocumentReader): + """ + For further details, please visit `https://help.aliyun.com/zh/open-search/search-platform/developer-reference/opensearch-api-details`. 
+ """ + + service_id: str = "ops-image-analyze-ocr-001" + + # upload a document and get back a task_id + @aretry_decorator + async def _create_task( + self, + file_path: str, + file_type: str, + **load_kwargs: Any, + ) -> str: + if file_path.startswith("data:"): + prefix, content = file_path.split(",") + if not file_type: + m = re.match(r"^data:image/(\w+);base64$", prefix) + file_type = m.group(1) + file_name = f"image.{file_type}" + document = CreateImageAnalyzeTaskRequestDocument( + content=content, + file_name=file_name, + file_type=file_type, + ) + elif file_path.startswith("http"): + file_name = os.path.basename(file_path.split("?")[0].split("#")[0]) + if not file_type: + file_type = os.path.splitext(file_name)[1][1:] + document = CreateImageAnalyzeTaskRequestDocument( + url=file_path, + file_name=file_name, + file_type=file_type, + ) + else: + file_name = os.path.basename(file_path) + if not file_type: + file_type = os.path.splitext(file_name)[1][1:] + document = CreateImageAnalyzeTaskRequestDocument( + content=base64.b64encode(open(file_path, "rb").read()).decode(), + file_name=file_name, + file_type=file_type, + ) + if not file_type: + raise ValueError( + "The file_type cannot be determined based on the file extension. Please specify it manually." + ) + request = CreateImageAnalyzeTaskRequest(document=document) + response: CreateImageAnalyzeTaskResponse = ( + await self._client.create_image_analyze_task_async( + self.workspace_name, self.service_id, request + ) + ) + return response.body.result.task_id + + async def _get_task_result(self, task_id: str) -> Document: + request = GetImageAnalyzeTaskStatusRequest(task_id=task_id) + while True: + response: GetImageAnalyzeTaskStatusResponse = ( + await self._client.get_image_analyze_task_status_async( + self.workspace_name, self.service_id, request + ) + ) + status = response.body.result.status + if status == "PENDING": + await asyncio.sleep(self.check_interval) + elif status == "SUCCESS": + data = response.body.result.data + return Document( + text=data.content, + mimetype=f"text/{data.content_type}", + ) + else: + raise RuntimeError( + f"Failed to parse the file, error: {response.body.result.error}, task id: {task_id}" + ) diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/pyproject.toml new file mode 100644 index 0000000000000..980b7cb746057 --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/pyproject.toml @@ -0,0 +1,64 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.readers.alibabacloud_aisearch" + +[tool.llamahub.class_authors] +AlibabaCloudAISearchDocumentReader = "llama-index" +AlibabaCloudAISearchImageReader = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["llama-index"] +description = "llama-index readers alibabacloud_aisearch integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-readers-alibabacloud-aisearch" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +alibabacloud-searchplat20240529 = 
"^1.1.0" +llama-index-core = "^0.11.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/BUILD b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/__init__.py b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/test_readers_alibabacloud_aisearch.py b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/test_readers_alibabacloud_aisearch.py new file mode 100644 index 0000000000000..9b8ee31098dce --- /dev/null +++ b/llama-index-integrations/readers/llama-index-readers-alibabacloud-aisearch/tests/test_readers_alibabacloud_aisearch.py @@ -0,0 +1,16 @@ +from llama_index.readers.alibabacloud_aisearch import ( + AlibabaCloudAISearchDocumentReader, + AlibabaCloudAISearchImageReader, +) +from llama_index.core.readers.base import BasePydanticReader + + +def test_class(): + names_of_base_classes = [ + b.__name__ for b in AlibabaCloudAISearchDocumentReader.__mro__ + ] + assert BasePydanticReader.__name__ in names_of_base_classes + names_of_base_classes = [ + b.__name__ for b in AlibabaCloudAISearchImageReader.__mro__ + ] + assert BasePydanticReader.__name__ in names_of_base_classes diff --git a/llama-index-integrations/readers/llama-index-readers-box/llama_index/readers/box/BoxAPI/box_llama_adaptors.py b/llama-index-integrations/readers/llama-index-readers-box/llama_index/readers/box/BoxAPI/box_llama_adaptors.py index 1b42f6a101de7..1df59ba23c0ed 100644 --- a/llama-index-integrations/readers/llama-index-readers-box/llama_index/readers/box/BoxAPI/box_llama_adaptors.py +++ b/llama-index-integrations/readers/llama-index-readers-box/llama_index/readers/box/BoxAPI/box_llama_adaptors.py @@ -23,12 +23,12 @@ def box_file_to_llama_document_metadata(box_file: File) -> dict: "description": box_file.description, "size": box_file.size, "path_collection": path_collection, - "created_at": box_file.created_at, - "modified_at": box_file.modified_at, - "trashed_at": box_file.trashed_at, - "purged_at": box_file.purged_at, - "content_created_at": box_file.content_created_at, - "content_modified_at": box_file.content_modified_at, + "created_at": box_file.created_at.isoformat(), + "modified_at": box_file.modified_at.isoformat(), + "trashed_at": box_file.trashed_at.isoformat() if box_file.trashed_at else None, + "purged_at": box_file.purged_at.isoformat() if box_file.purged_at else None, + 
"content_created_at": box_file.content_created_at.isoformat(), + "content_modified_at": box_file.content_modified_at.isoformat(), "created_by": f"{box_file.created_by.id},{box_file.created_by.name},{box_file.created_by.login}", "modified_by": f"{box_file.modified_by.id},{box_file.modified_by.name},{box_file.modified_by.login}", "owned_by": f"{box_file.owned_by.id},{box_file.owned_by.name},{box_file.owned_by.login}", diff --git a/llama-index-integrations/readers/llama-index-readers-box/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-box/pyproject.toml index b9180d32cc7c5..d6562fb3bcadd 100644 --- a/llama-index-integrations/readers/llama-index-readers-box/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-box/pyproject.toml @@ -37,7 +37,7 @@ maintainers = [ name = "llama-index-readers-box" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/readers/llama-index-readers-box/tests/test_readers_box_reader.py b/llama-index-integrations/readers/llama-index-readers-box/tests/test_readers_box_reader.py index 7cd1681d7f988..3bc2a0557737d 100644 --- a/llama-index-integrations/readers/llama-index-readers-box/tests/test_readers_box_reader.py +++ b/llama-index-integrations/readers/llama-index-readers-box/tests/test_readers_box_reader.py @@ -1,3 +1,4 @@ +import datetime import pytest from pathlib import Path from llama_index.core.readers.base import BaseReader @@ -47,6 +48,17 @@ def test_box_reader_csv(box_client_ccg_integration_testing: BoxClient): assert len(docs) == 1 +def test_box_reader_metadata(box_client_ccg_integration_testing: BoxClient): + test_data = get_testing_data() + reader = BoxReader(box_client=box_client_ccg_integration_testing) + docs = reader.load_data(file_ids=[test_data["test_csv_id"]]) + assert len(docs) == 1 + doc = docs[0] + # check if metadata dictionary does not contain any datetime objects + for v in doc.metadata.values(): + assert not isinstance(v, (datetime.datetime, datetime.date, datetime.time)) + + def test_box_reader_folder(box_client_ccg_integration_testing): # Very slow test test_data = get_testing_data() diff --git a/llama-index-integrations/readers/llama-index-readers-confluence/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-confluence/pyproject.toml index e75ffffa31bbc..21cb68aed91df 100644 --- a/llama-index-integrations/readers/llama-index-readers-confluence/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-confluence/pyproject.toml @@ -28,12 +28,12 @@ license = "MIT" maintainers = ["zywilliamli"] name = "llama-index-readers-confluence" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" atlassian-python-api = "^3.41.9" -html2text = "^2020.1.16" +html2text = "^2024.2.26" pytesseract = "^0.3.10" pdf2image = "^1.17.0" pillow = "^10.2.0" diff --git a/llama-index-integrations/readers/llama-index-readers-dashscope/llama_index/readers/dashscope/base.py b/llama-index-integrations/readers/llama-index-readers-dashscope/llama_index/readers/dashscope/base.py index cbb150c276038..b2fde36d1d460 100644 --- a/llama-index-integrations/readers/llama-index-readers-dashscope/llama_index/readers/dashscope/base.py +++ b/llama-index-integrations/readers/llama-index-readers-dashscope/llama_index/readers/dashscope/base.py @@ -38,20 +38,27 @@ class DashScopeParse(BasePydanticReader): """A smart-parser for 
files.""" - api_key: str = Field(default="", description="The API key for the DashScope API.") + api_key: str = Field( + default="", + description="The API key for the DashScope API.", + validate_default=True, + ) workspace_id: str = Field( default="", description="The Workspace for the DashScope API.If not set, " "it will use the default workspace.", + validate_default=True, ) category_id: str = Field( default=DASHSCOPE_DEFAULT_DC_CATEGORY, description="The dc category for the DashScope API.If not set, " "it will use the default dc category.", + validate_default=True, ) base_url: str = Field( default=DASHSCOPE_DEFAULT_BASE_URL, description="The base URL of the DashScope Parsing API.", + validate_default=True, ) result_type: ResultType = Field( default=ResultType.DASHSCOPE_DOCMIND, @@ -86,7 +93,7 @@ class DashScopeParse(BasePydanticReader): description="Whether or not to return parsed text content.", ) - @field_validator("api_key", mode="before") + @field_validator("api_key", mode="before", check_fields=True) def validate_api_key(cls, v: str) -> str: """Validate the API key.""" if not v: @@ -99,7 +106,7 @@ def validate_api_key(cls, v: str) -> str: return v - @field_validator("workspace_id", mode="before") + @field_validator("workspace_id", mode="before", check_fields=True) def validate_workspace_id(cls, v: str) -> str: """Validate the Workspace.""" if not v: @@ -109,7 +116,7 @@ def validate_workspace_id(cls, v: str) -> str: return v - @field_validator("category_id", mode="before") + @field_validator("category_id", mode="before", check_fields=True) def validate_category_id(cls, v: str) -> str: """Validate the category.""" if not v: @@ -118,7 +125,7 @@ def validate_category_id(cls, v: str) -> str: return os.getenv("DASHSCOPE_CATEGORY_ID", DASHSCOPE_DEFAULT_DC_CATEGORY) return v - @field_validator("base_url", mode="before") + @field_validator("base_url", mode="before", check_fields=True) def validate_base_url(cls, v: str) -> str: """Validate the base URL.""" if v and v != DASHSCOPE_DEFAULT_BASE_URL: diff --git a/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml index 3d63efb2a9f67..72593437af7e2 100644 --- a/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml @@ -30,7 +30,7 @@ license = "MIT" name = "llama-index-readers-dashscope" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/readers/llama-index-readers-discord/llama_index/readers/discord/base.py b/llama-index-integrations/readers/llama-index-readers-discord/llama_index/readers/discord/base.py index b0f0f20ae5dcf..ac39334431a5a 100644 --- a/llama-index-integrations/readers/llama-index-readers-discord/llama_index/readers/discord/base.py +++ b/llama-index-integrations/readers/llama-index-readers-discord/llama_index/readers/discord/base.py @@ -73,7 +73,7 @@ async def on_ready(self) -> None: return [ Document( text=msg.content, - id_=msg.id, + id_=str(msg.id), metadata={ "message_id": msg.id, "username": msg.author.name, diff --git a/llama-index-integrations/readers/llama-index-readers-discord/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-discord/pyproject.toml index 62212e152cab3..119f4a34a3a0f 100644 --- 
a/llama-index-integrations/readers/llama-index-readers-discord/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-discord/pyproject.toml @@ -28,7 +28,7 @@ license = "MIT" maintainers = ["jerryjliu"] name = "llama-index-readers-discord" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/readers/llama-index-readers-docstring-walker/examples/docstringwalker_example.ipynb b/llama-index-integrations/readers/llama-index-readers-docstring-walker/examples/docstringwalker_example.ipynb index 90cdf19505634..fdc580ac8617f 100644 --- a/llama-index-integrations/readers/llama-index-readers-docstring-walker/examples/docstringwalker_example.ipynb +++ b/llama-index-integrations/readers/llama-index-readers-docstring-walker/examples/docstringwalker_example.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/llama-index-integrations/readers/llama-index-readers-file/llama_index/readers/file/image/base.py b/llama-index-integrations/readers/llama-index-readers-file/llama_index/readers/file/image/base.py index b414c1964022e..ff175e6eef105 100644 --- a/llama-index-integrations/readers/llama-index-readers-file/llama_index/readers/file/image/base.py +++ b/llama-index-integrations/readers/llama-index-readers-file/llama_index/readers/file/image/base.py @@ -82,7 +82,7 @@ def load_data( # load document image if fs: with fs.open(path=file) as f: - image = Image.open(f.read()) + image = Image.open(BytesIO(f.read())) else: image = Image.open(file) diff --git a/llama-index-integrations/readers/llama-index-readers-file/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-file/pyproject.toml index 87617d5196262..816cfc838c1ce 100644 --- a/llama-index-integrations/readers/llama-index-readers-file/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-file/pyproject.toml @@ -51,7 +51,7 @@ license = "MIT" maintainers = ["FarisHijazi", "Haowjy", "ephe-meral", "hursh-desai", "iamarunbrahma", "jon-chuang", "mmaatouk", "ravi03071991", "sangwongenip", "thejessezhang"] name = "llama-index-readers-file" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/readers/llama-index-readers-llama-parse/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-llama-parse/pyproject.toml index e367ef9afc939..d6c2371a71827 100644 --- a/llama-index-integrations/readers/llama-index-readers-llama-parse/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-llama-parse/pyproject.toml @@ -28,11 +28,11 @@ keywords = ["PDF", "llama", "llama-parse", "parse"] license = "MIT" name = "llama-index-readers-llama-parse" readme = "README.md" -version = "0.2.0" +version = "0.3.0" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -llama-parse = ">=0.4.0" +llama-parse = ">=0.5.0" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/llama_index/readers/microsoft_sharepoint/base.py b/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/llama_index/readers/microsoft_sharepoint/base.py index 48910873fdfc4..f2b4e4c16fc74 100644 --- a/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/llama_index/readers/microsoft_sharepoint/base.py +++ 
b/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/llama_index/readers/microsoft_sharepoint/base.py @@ -116,12 +116,17 @@ def _get_access_token(self) -> str: data=payload, ) - if response.status_code == 200 and "access_token" in response.json(): - return response.json()["access_token"] + json_response = response.json() + + if response.status_code == 200 and "access_token" in json_response: + return json_response["access_token"] else: - logger.error(response.json()["error"]) - raise ValueError(response.json()["error_description"]) + error_message = json_response.get("error_description") or json_response.get( + "error" + ) + logger.error("Error retrieving access token: %s", json_response["error"]) + raise ValueError(f"Error retrieving access token: {error_message}") def _get_site_id_with_host_name( self, access_token, sharepoint_site_name: Optional[str] @@ -178,10 +183,11 @@ def _get_site_id_with_host_name( f"The specified sharepoint site {sharepoint_site_name} is not found." ) else: - if "error_description" in json_response: - logger.error(json_response["error"]) - raise ValueError(json_response["error_description"]) - raise ValueError(json_response["error"]) + error_message = json_response.get( + "error_description" + ) or json_response.get("error") + logger.error("Error retrieving site ID: %s", json_response["error"]) + raise ValueError(f"Error retrieving site ID: {error_message}") raise ValueError( f"The specified sharepoint site {sharepoint_site_name} is not found." @@ -209,26 +215,27 @@ def _get_drive_id(self) -> str: url=self._drive_id_endpoint, headers=self._authorization_headers, ) + json_response = response.json() - if response.status_code == 200 and "value" in response.json(): - if len(response.json()["value"]) > 0 and self.drive_name is not None: - for drive in response.json()["value"]: + if response.status_code == 200 and "value" in json_response: + if len(json_response["value"]) > 0 and self.drive_name is not None: + for drive in json_response["value"]: if drive["name"].lower() == self.drive_name.lower(): return drive["id"] raise ValueError(f"The specified drive {self.drive_name} is not found.") - if ( - len(response.json()["value"]) > 0 - and "id" in response.json()["value"][0] - ): - return response.json()["value"][0]["id"] + if len(json_response["value"]) > 0 and "id" in json_response["value"][0]: + return json_response["value"][0]["id"] else: raise ValueError( "Error occurred while fetching the drives for the sharepoint site." 
) else: - logger.error(response.json()["error"]) - raise ValueError(response.json()["error_description"]) + error_message = json_response.get("error_description") or json_response.get( + "error" + ) + logger.error("Error retrieving drive ID: %s", json_response["error"]) + raise ValueError(f"Error retrieving drive ID: {error_message}") def _get_sharepoint_folder_id(self, folder_path: str) -> str: """ @@ -252,7 +259,9 @@ def _get_sharepoint_folder_id(self, folder_path: str) -> str: if response.status_code == 200 and "id" in response.json(): return response.json()["id"] else: - raise ValueError(response.json()["error"]) + error_message = response.json().get("error", "Unknown error") + logger.error("Error retrieving folder ID: %s", error_message) + raise ValueError(f"Error retrieving folder ID: {error_message}") def _download_files_and_extract_metadata( self, @@ -300,9 +309,14 @@ def _get_file_content_by_url(self, item: Dict[str, Any]) -> bytes: """ file_download_url = item["@microsoft.graph.downloadUrl"] response = requests.get(file_download_url) + if response.status_code != 200: - logger.error(response.json()["error"]) - raise ValueError(response.json()["error_description"]) + json_response = response.json() + error_message = json_response.get("error_description") or json_response.get( + "error" + ) + logger.error("Error downloading file content: %s", json_response["error"]) + raise ValueError(f"Error downloading file content: {error_message}") return response.content @@ -788,10 +802,8 @@ def load_resource(self, resource_id: str, **kwargs) -> List[Document]: item = self._get_item_from_path(path) - input_file_dir = path.parent - with tempfile.TemporaryDirectory() as temp_dir: - metadata = self._download_file(item, temp_dir, input_file_dir) + metadata = self._download_file(item, temp_dir) return self._load_documents_with_metadata( metadata, temp_dir, recursive=False ) diff --git a/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/pyproject.toml index b4ab9d40b29ca..2565ce2196900 100644 --- a/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-microsoft-sharepoint/pyproject.toml @@ -29,7 +29,7 @@ license = "MIT" maintainers = ["arun-soliton"] name = "llama-index-readers-microsoft-sharepoint" readme = "README.md" -version = "0.3.0" +version = "0.3.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/readers/llama-index-readers-minio/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-minio/pyproject.toml index 1d3c095dd740c..e578c84a84338 100644 --- a/llama-index-integrations/readers/llama-index-readers-minio/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-minio/pyproject.toml @@ -12,7 +12,7 @@ contains_example = false import_path = "llama_index.readers.minio" [tool.llamahub.class_authors] -BotoMinioReader = "llama-index" +BotoMinioReader = "webcoderz" MinioReader = "llama-index" [tool.mypy] diff --git a/llama-index-integrations/readers/llama-index-readers-mongodb/README.md b/llama-index-integrations/readers/llama-index-readers-mongodb/README.md index 385fa0d23c8de..9533946d476d0 100644 --- a/llama-index-integrations/readers/llama-index-readers-mongodb/README.md +++ b/llama-index-integrations/readers/llama-index-readers-mongodb/README.md @@ -15,7 +15,7 @@ pip install llama-index-readers-mongodb ### Usage 
```python -from llama_index.readers.mongo import SimpleMongoReader +from llama_index.readers.mongodb import SimpleMongoReader # Initialize SimpleMongoReader reader = SimpleMongoReader( diff --git a/llama-index-integrations/readers/llama-index-readers-web/llama_index/readers/web/firecrawl_web/base.py b/llama-index-integrations/readers/llama-index-readers-web/llama_index/readers/web/firecrawl_web/base.py index ebdc4d98cf69a..7367dc57e3e5a 100644 --- a/llama-index-integrations/readers/llama-index-readers-web/llama_index/readers/web/firecrawl_web/base.py +++ b/llama-index-integrations/readers/llama-index-readers-web/llama_index/readers/web/firecrawl_web/base.py @@ -87,6 +87,7 @@ def load_data( ) elif self.mode == "crawl": firecrawl_docs = self.firecrawl.crawl_url(url, params=self.params) + firecrawl_docs = firecrawl_docs.get("data", []) for doc in firecrawl_docs: documents.append( Document( diff --git a/llama-index-integrations/readers/llama-index-readers-web/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-web/pyproject.toml index 18d8e29a5958f..eb38ea0f97036 100644 --- a/llama-index-integrations/readers/llama-index-readers-web/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-web/pyproject.toml @@ -45,7 +45,7 @@ license = "MIT" maintainers = ["HawkClaws", "Hironsan", "NA", "an-bluecat", "bborn", "jasonwcfan", "kravetsmic", "pandazki", "ruze00", "selamanse", "thejessezhang"] name = "llama-index-readers-web" readme = "README.md" -version = "0.2.1" +version = "0.2.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/.gitignore b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/BUILD b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/Makefile b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/README.md b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/README.md new file mode 100644 index 0000000000000..50ad75f53b45b --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/README.md @@ -0,0 +1,99 @@ +# LlamaIndex Selectors Integration: NotDiamond + +[Not Diamond](https://notdiamond.ai) offers an AI-powered model router that automatically determines which LLM is best suited to respond to any query, improving LLM output quality by combining multiple LLMs into a **meta-model** that learns when to call each LLM. + +Not Diamond supports cost and latency tradeoffs, customized router training, and real-time router personalization. Learn more via [the documentation](https://notdiamond.readme.io/). + +## Installation + +```shell +pip install llama-index-selectors-notdiamond +``` + +## Configuration + +- API keys: You need API keys from [Not Diamond](https://app.notdiamond.ai/keys) and any LLM you want to use. + +## Quick Start + +```python +import os +from typing import List + +from llama_index.core import ( + SimpleDirectoryReader, + VectorStoreIndex, + SummaryIndex, + Settings, +) +from llama_index.core.query_engine import RouterQueryEngine +from llama_index.core.tools import QueryEngineTool +from llama_index.selectors.notdiamond.base import NotDiamondSelector + +from notdiamond import NotDiamond + + +# Set up your API keys +os.environ["OPENAI_API_KEY"] = "sk-..." +os.environ["ANTHROPIC_API_KEY"] = "sk-ant-..." 
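+# NOTE: the keys above and below are illustrative placeholders ("sk-...");
+# in practice, load real keys from your environment or a secrets manager.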
+os.environ["NOTDIAMOND_API_KEY"] = "sk-..." + + +# Create indexes +documents = SimpleDirectoryReader("data/paul_graham").load_data() +nodes = Settings.node_parser.get_nodes_from_documents(documents) + +vector_index = VectorStoreIndex.from_documents(documents) +summary_index = SummaryIndex.from_documents(documents) +query_text = "What was Paul Graham's role at Yahoo?" + + +# Set up Tools for the QueryEngine +list_query_engine = summary_index.as_query_engine( + response_mode="tree_summarize", + use_async=True, +) +vector_query_engine = vector_index.as_query_engine() + +list_tool = QueryEngineTool.from_defaults( + query_engine=list_query_engine, + description=( + "Useful for summarization questions related to Paul Graham eassy on" + " What I Worked On." + ), +) + +vector_tool = QueryEngineTool.from_defaults( + query_engine=vector_query_engine, + description=( + "Useful for retrieving specific context from Paul Graham essay on What" + " I Worked On." + ), +) + + +# Create a NotDiamondSelector and RouterQueryEngine +client = NotDiamond( + api_key=os.environ["NOTDIAMOND_API_KEY"], + llm_configs=["openai/gpt-4o", "anthropic/claude-3-5-sonnet-20240620"], +) +preference_id = client.create_preference_id() +client.preference_id = preference_id + +nd_selector = NotDiamondSelector(client=client) + +query_engine = RouterQueryEngine( + selector=nd_selector, + query_engine_tools=[ + list_tool, + vector_tool, + ], +) + + +# Use Not Diamond to Query Indexes +response = query_engine.query( + "Please summarize Paul Graham's working experience." +) +print(str(response)) +``` diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/BUILD b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/__init__.py b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/__init__.py new file mode 100644 index 0000000000000..6f2a506585d89 --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/__init__.py @@ -0,0 +1,4 @@ +from llama_index.selectors.notdiamond.base import NotDiamondSelector + + +__all__ = ["NotDiamondSelector"] diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/base.py b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/base.py new file mode 100644 index 0000000000000..083858c54baeb --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/llama_index/selectors/notdiamond/base.py @@ -0,0 +1,178 @@ +import logging +import os +from typing import Sequence + +from llama_index.core.llms.llm import LLM +from llama_index.core.schema import QueryBundle +from llama_index.core.tools.types import ToolMetadata +from llama_index.core.base.base_selector import SelectorResult +from llama_index.core.selectors import LLMSingleSelector +from llama_index.core.selectors.llm_selectors import _build_choices_text + +from notdiamond import NotDiamond, LLMConfig, Metric + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.WARNING) + + 
+class NotDiamondSelectorResult(SelectorResult): + """A single selection of a choice provided by Not Diamond.""" + + class Config: + arbitrary_types_allowed = True + + session_id: str + llm: LLMConfig + + @classmethod + def from_selector_result( + cls, selector_result: SelectorResult, session_id: str, best_llm: LLMConfig + ) -> "NotDiamondSelectorResult": + return cls(session_id=session_id, llm=best_llm, **selector_result.dict()) + + +class NotDiamondSelector(LLMSingleSelector): + def __init__( + self, + client: NotDiamond, + metric: Metric = None, + timeout: int = 10, + api_key: str = None, + *args, + **kwargs, + ): + """ + Initialize a NotDiamondSelector. Users should instantiate and configure a NotDiamond client as needed before + creating this selector. The constructor will raise errors re: required client fields. + """ + # Not needed - we will route using our own client based on the query prompt + # Add @property for _llm here + _encap_selector = LLMSingleSelector.from_defaults() + self._llm = None + self._prompt = _encap_selector._prompt + + if not getattr(client, "llm_configs", None): + raise ValueError( + "NotDiamond client must have llm_configs before creating a NotDiamondSelector." + ) + + if metric and not isinstance(metric, Metric): + raise ValueError(f"Invalid metric - needed type Metric but got {metric}") + self._metric = metric or Metric("accuracy") + + self._client = client + self._llms = [ + self._llm_config_to_client(llm_config) + for llm_config in self._client.llm_configs + ] + self._timeout = timeout + super().__init__(_encap_selector._llm, _encap_selector._prompt, *args, **kwargs) + + def _llm_config_to_client(self, llm_config: LLMConfig | str) -> LLM: + """ + For the selected LLMConfig dynamically create an LLM instance. NotDiamondSelector will + assign this to self._llm to help select the best index. + """ + if isinstance(llm_config, str): + llm_config = LLMConfig.from_string(llm_config) + provider, model = llm_config.provider, llm_config.model + + output = None + if provider == "openai": + from llama_index.llms.openai import OpenAI + + output = OpenAI(model=model, api_key=os.getenv("OPENAI_API_KEY")) + elif provider == "anthropic": + from llama_index.llms.anthropic import Anthropic + + output = Anthropic(model=model, api_key=os.getenv("ANTHROPIC_API_KEY")) + elif provider == "cohere": + from llama_index.llms.cohere import Cohere + + output = Cohere(model=model, api_key=os.getenv("COHERE_API_KEY")) + elif provider == "mistral": + from llama_index.llms.mistralai import MistralAI + + output = MistralAI(model=model, api_key=os.getenv("MISTRALAI_API_KEY")) + elif provider == "togetherai": + from llama_index.llms.together import TogetherLLM + + output = TogetherLLM(model=model, api_key=os.getenv("TOGETHERAI_API_KEY")) + else: + raise ValueError(f"Unsupported provider for NotDiamondSelector: {provider}") + + return output + + def _select( + self, choices: Sequence[ToolMetadata], query: QueryBundle, timeout: int = None + ) -> SelectorResult: + """ + Call Not Diamond to select the best LLM for the given prompt, then have the LLM select the best tool. 
+ """ + messages = [ + {"role": "system", "content": self._format_prompt(choices, query)}, + {"role": "user", "content": query.query_str}, + ] + + session_id, best_llm = self._client.model_select( + messages=messages, + llm_configs=self._client.llm_configs, + metric=self._metric, + notdiamond_api_key=self._client.api_key, + max_model_depth=self._client.max_model_depth, + hash_content=self._client.hash_content, + tradeoff=self._client.tradeoff, + preference_id=self._client.preference_id, + tools=self._client.tools, + timeout=timeout or self._timeout, + ) + + self._llm = self._llm_config_to_client(best_llm) + + return NotDiamondSelectorResult.from_selector_result( + super()._select(choices, query), session_id, best_llm + ) + + async def _aselect( + self, choices: Sequence[ToolMetadata], query: QueryBundle, timeout: int = None + ) -> SelectorResult: + """ + Call Not Diamond asynchronously to select the best LLM for the given prompt, then have the LLM select the best tool. + """ + messages = [ + {"role": "system", "content": self._format_prompt(choices, query)}, + {"role": "user", "content": query.query_str}, + ] + + session_id, best_llm = await self._client.amodel_select( + messages=messages, + llm_configs=self._client.llm_configs, + metric=self._metric, + notdiamond_api_key=self._client.api_key, + max_model_depth=self._client.max_model_depth, + hash_content=self._client.hash_content, + tradeoff=self._client.tradeoff, + preference_id=self._client.preference_id, + tools=self._client.tools, + timeout=timeout or self._timeout, + ) + + self._llm = self._llm_config_to_client(best_llm) + + return NotDiamondSelectorResult.from_selector_result( + await super()._aselect(choices, query), session_id, best_llm + ) + + def _format_prompt( + self, choices: Sequence[ToolMetadata], query: QueryBundle + ) -> str: + """ + A system prompt for selection is created when instantiating the parent LLMSingleSelector class. + This method formats the prompt into a str so that it can be serialized for the NotDiamond API. 
+ """ + context_list = _build_choices_text(choices) + return self._prompt.format( + num_choices=len(choices), + context_list=context_list, + query_str=query.query_str, + ) diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/pyproject.toml b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/pyproject.toml new file mode 100644 index 0000000000000..7cfb12f5b3205 --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/pyproject.toml @@ -0,0 +1,56 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +# Feel free to un-skip examples, and experimental, you will just need to +# work through many typos (--write-changes and --interactive will help) +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = true +import_path = "llama_index.selectors.notdiamond" + +[tool.llamahub.class_authors] +NotDiamondSelector = "acompa" + +[tool.mypy] +disallow_untyped_defs = true +# Remove venv skip when integrated with pre-commit +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.11" + +[tool.poetry] +authors = ["Not Diamond "] +description = "llama-index selectors Not Diamond integration" +name = "llama-index-selectors-notdiamond" +packages = [{include = "llama_index/"}] +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.10.0,<3.12" +llama-index-core = "^0.10.0" +notdiamond = "^0.3.5" + +[tool.poetry.group.dev.dependencies] +black = {extras = ["jupyter"], version = "<=23.9.1,>=23.7.0"} +codespell = {extras = ["toml"], version = ">=v2.2.6"} +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" # TODO: unpin when mypy>0.991 +types-setuptools = "67.1.0.0" diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/BUILD b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/BUILD new file mode 100644 index 0000000000000..db53f45e8d68e --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/BUILD @@ -0,0 +1,3 @@ +python_tests( + interpreter_constraints=["==3.10.*"] +) diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/__init__.py b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/test_selectors_notdiamond.py b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/test_selectors_notdiamond.py new file mode 100644 index 0000000000000..754f96a686f8f --- /dev/null +++ b/llama-index-integrations/selectors/llama-index-selectors-notdiamond/tests/test_selectors_notdiamond.py @@ -0,0 +1,95 @@ +import os +import pytest +from typing import List +from unittest.mock import MagicMock, patch, AsyncMock +import uuid + +from llama_index.core.base.base_selector import ( + SelectorResult, + SingleSelection, +) +from llama_index.core.schema import QueryBundle +from llama_index.core.tools import ToolMetadata +from 
llama_index.selectors.notdiamond.base import NotDiamondSelector, LLMSingleSelector + +from notdiamond import LLMConfig + + +@pytest.fixture() +def session_id() -> str: + return str(uuid.uuid4()) + + +@pytest.fixture() +def choices() -> List[ToolMetadata]: + return [ + ToolMetadata( + name="vector_index", description="Great for asking questions about recipes." + ), + ToolMetadata(name="list_index", description="Great for summarizing recipes."), + ] + + +@pytest.fixture() +def nd_selector(session_id): + from notdiamond import NotDiamond + + os.environ["OPENAI_API_KEY"] = "test" + os.environ["ANTHROPIC_API_KEY"] = "test" + + llm_configs = [ + LLMConfig(provider="openai", model="gpt-4o"), + LLMConfig(provider="anthropic", model="claude-3-opus-20240229"), + ] + + # mocking out model_select calls on client + _client = MagicMock(stub=NotDiamond, api_key="test", llm_configs=llm_configs) + _client.model_select.return_value = (session_id, llm_configs[0]) + + async def aselect(*args, **kwargs): + return (session_id, llm_configs[0]) + + _client.amodel_select = aselect + selector = NotDiamondSelector(client=_client) + + # monkeypatch the _select and _aselect methods on parent class of NDSelector + LLMSingleSelector._select = MagicMock( + return_value=SelectorResult( + selections=[SingleSelection(index=0, reason="test")] + ) + ) + LLMSingleSelector._aselect = AsyncMock( + return_value=SelectorResult( + selections=[SingleSelection(index=1, reason="test")] + ) + ) + + return selector + + +class TestNotDiamondSelector: + @patch("llama_index.llms.openai.OpenAI") + def test_select(self, openai_mock, nd_selector, choices, session_id): + """_select should call openai, as mocked.""" + openai_mock.return_value = MagicMock() + openai_mock.return_value.chat.return_value.message.content = "vector_index" + query = "Please describe the llama_index framework in 280 characters or less." + result = nd_selector._select(choices, QueryBundle(query_str=query)) + assert result.session_id == session_id + assert str(result.llm) == "openai/gpt-4o" + assert result.selections[0].index == 0 + assert openai_mock.called + + @pytest.mark.asyncio() + @patch("llama_index.llms.anthropic.Anthropic") + async def test_aselect(self, anthropic_mock, nd_selector, choices, session_id): + """_aselect should call anthropic, as mocked.""" + anthropic_mock.return_value = MagicMock() + anthropic_mock.return_value.chat.return_value.message.content = "vector_index" + + query = "How can I cook a vegan variant of deviled eggs?"
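+ # LLMSingleSelector._aselect is mocked in the nd_selector fixture to return
+ # index 1, so both the Not Diamond routing result (session id, chosen LLM)
+ # and the tool selection can be asserted below.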
+ result = await nd_selector._aselect(choices, QueryBundle(query_str=query)) + assert result.session_id == session_id + assert str(result.llm) == "anthropic/claude-3-opus-20240229" + assert result.selections[0].index == 1 + assert anthropic_mock.called diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/.gitignore b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g.
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/BUILD b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/Makefile b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/README.md b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/README.md new file mode 100644 index 0000000000000..b6c9baf30eac3 --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/README.md @@ -0,0 +1,57 @@ +# LlamaIndex Chat_Store Integration: DynamoDB Chat Store + +This enables AWS DynamoDB to be used as a chat store. + +## Installation + +```bash +pip install llama-index-storage-chat-store-dynamodb +``` + +## Usage + +### Assumptions + +- `SessionId`, a string, is used as the partition key. +- The table used for the chat store already exists. Here is an example of creating one: + + ```python + import boto3 + + dynamodb = boto3.resource("dynamodb") + + # Create the DynamoDB table. + table = dynamodb.create_table( + TableName="EXAMPLE_TABLE", + KeySchema=[{"AttributeName": "SessionId", "KeyType": "HASH"}], + AttributeDefinitions=[ + {"AttributeName": "SessionId", "AttributeType": "S"} + ], + BillingMode="PAY_PER_REQUEST", + ) + ``` + +### Using an AWS IAM Role + +You can use any of the following AWS arguments to set up the required `boto3` resource connection: + +- `profile_name` +- `aws_access_key_id` +- `aws_secret_access_key` +- `aws_session_token` +- `botocore_session` - A pre-existing Botocore session.
+- `botocore_config` + +As an example, if you have already assumed an AWS profile in your local environment or within an AWS compute +environment, you can simply do the following: + +```python +import os +from llama_index.storage.chat_store.dynamodb.base import DynamoDBChatStore + +store = DynamoDBChatStore( + profile_name=os.getenv("AWS_PROFILE"), + table_name="EXAMPLE_TABLE", +) +``` diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/BUILD b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/__init__.py b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/__init__.py new file mode 100644 index 0000000000000..48dfc0c164bfd --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/__init__.py @@ -0,0 +1,3 @@ +from llama_index.storage.chat_store.dynamodb.base import DynamoDBChatStore + +__all__ = ["DynamoDBChatStore"] diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/base.py b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/base.py new file mode 100644 index 0000000000000..880334a88cbc4 --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/llama_index/storage/chat_store/dynamodb/base.py @@ -0,0 +1,282 @@ +import logging +from typing import Any, Dict, List, Optional + +from llama_index.core.base.llms.types import ChatMessage +from llama_index.core.bridge.pydantic import Field, PrivateAttr +from llama_index.core.storage.chat_store.base import BaseChatStore +from mypy_boto3_dynamodb import ServiceResource + + +logger = logging.getLogger(__name__) + + +# Convert a list of ChatMessages to a list of JSON objects +def _messages_to_dict(messages: List[ChatMessage]) -> List[dict]: + return [_message_to_dict(message) for message in messages] + + +# Convert a ChatMessage to a JSON object +def _message_to_dict(message: ChatMessage) -> dict: + return message.dict() + + +# Convert a JSON object to a ChatMessage +def _dict_to_message(d: dict) -> ChatMessage: + return ChatMessage.model_validate(d) + + +class DynamoDBChatStore(BaseChatStore): + """DynamoDB Chat Store. + + Args: + table_name (str): The name of the preexisting DynamoDB table. + primary_key (str, optional): The primary/partition key to use for the table. + Defaults to "SessionId". + profile_name (str, optional): The AWS profile to use. If not specified, then + the default AWS profile is used. + aws_access_key_id (str, optional): The AWS Access Key ID to use. + aws_secret_access_key (str, optional): The AWS Secret Access Key to use. + aws_session_token (str, optional): The AWS Session Token to use. + botocore_session (Any, optional): Use this Botocore session instead of creating a new default one.
+ botocore_config (Any, optional): Custom configuration object to use instead of the default generated one. + region_name (str, optional): The AWS region name to use. Uses the region configured in AWS CLI if not passed. + max_retries (int, optional): The maximum number of API retries. Defaults to 10. + timeout (float, optional): The timeout for API requests in seconds. Defaults to 60.0. + session_kwargs (Dict[str, Any], optional): Additional kwargs for the `boto3.Session` object. + resource_kwargs (Dict[str, Any], optional): Additional kwargs for the `boto3.Resource` object. + + Returns: + DynamoDBChatStore: A DynamoDB chat store object. + """ + + table_name: str = Field(description="DynamoDB table") + primary_key: str = Field( + default="SessionId", description="Primary/partition key to use for the table." + ) + profile_name: Optional[str] = Field( + description="AWS profile to use. If not specified, then the default AWS profile is used." + ) + aws_access_key_id: Optional[str] = Field( + description="AWS Access Key ID to use.", exclude=True + ) + aws_secret_access_key: Optional[str] = Field( + description="AWS Secret Access Key to use.", exclude=True + ) + aws_session_token: Optional[str] = Field( + description="AWS Session Token to use.", exclude=True + ) + botocore_session: Optional[Any] = Field( + description="Use this Botocore session instead of creating a new default one.", + exclude=True, + ) + botocore_config: Optional[Any] = Field( + description="Custom configuration object to use instead of the default generated one.", + exclude=True, + ) + region_name: Optional[str] = Field( + description="AWS region name to use. Uses the region configured in AWS CLI if not passed", + exclude=True, + ) + max_retries: int = Field( + default=10, description="The maximum number of API retries.", gt=0 + ) + timeout: float = Field( + default=60.0, + description="The timeout for API requests in seconds.", + ) + session_kwargs: Dict[str, Any] = Field( + default_factory=dict, + description="Additional kwargs for the `boto3.Session` object.", + ) + resource_kwargs: Dict[str, Any] = Field( + default_factory=dict, + description="Additional kwargs for the `boto3.Resource` object.", + ) + + _client: ServiceResource = PrivateAttr() + _table: Any = PrivateAttr() + + def __init__( + self, + table_name: str, + primary_key: str = "SessionId", + profile_name: Optional[str] = None, + region_name: Optional[str] = None, + aws_access_key_id: Optional[str] = None, + aws_secret_access_key: Optional[str] = None, + aws_session_token: Optional[str] = None, + botocore_session: Optional[Any] = None, + botocore_config: Optional[Any] = None, + max_retries: int = 10, + timeout: float = 60.0, + session_kwargs: Optional[Dict[str, Any]] = None, + resource_kwargs: Optional[Dict[str, Any]] = None, + ): + session_kwargs = session_kwargs or {} + resource_kwargs = resource_kwargs or {} + + super().__init__( + table_name=table_name, + primary_key=primary_key, + profile_name=profile_name, + region_name=region_name, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + botocore_session=botocore_session, + botocore_config=botocore_config, + max_retries=max_retries, + timeout=timeout, + session_kwargs=session_kwargs, + resource_kwargs=resource_kwargs, + ) + + session_kwargs = { + "profile_name": profile_name, + "region_name": region_name, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": 
aws_session_token, + "botocore_session": botocore_session, + **session_kwargs, + } + + try: + import boto3 + from botocore.config import Config + + config = ( + Config( + retries={"max_attempts": max_retries, "mode": "standard"}, + connect_timeout=timeout, + read_timeout=timeout, + ) + if botocore_config is None + else botocore_config + ) + session = boto3.Session(**session_kwargs) + except ImportError: + raise ImportError( + "boto3 package not found, install with 'pip install boto3'" + ) + + self._client = session.resource("dynamodb", config=config, **resource_kwargs) + self._table = self._client.Table(table_name) + + @classmethod + def class_name(cls) -> str: + return "DynamoDBChatStore" + + def set_messages(self, key: str, messages: List[ChatMessage]) -> None: + """Assign all provided messages to the row with the given key. + Any pre-existing messages for that key will be overwritten. + + Args: + key (str): The key specifying a row. + messages (List[ChatMessage]): The messages to assign to the key. + + Returns: + None + """ + self._table.put_item( + Item={self.primary_key: key, "History": _messages_to_dict(messages)} + ) + + def get_messages(self, key: str) -> List[ChatMessage]: + """Retrieve all messages for the given key. + + Args: + key (str): The key specifying a row. + + Returns: + List[ChatMessage]: The messages associated with the key. + """ + response = self._table.get_item(Key={self.primary_key: key}) + + if response and "Item" in response: + message_history = response["Item"]["History"] + else: + message_history = [] + + return [_dict_to_message(message) for message in message_history] + + def add_message(self, key: str, message: ChatMessage) -> None: + """Add a message to the end of the chat history for the given key. + Creates a new row if the key does not exist. + + Args: + key (str): The key specifying a row. + message (ChatMessage): The message to add to the chat history. + + Returns: + None + """ + current_messages = _messages_to_dict(self.get_messages(key)) + current_messages.append(_message_to_dict(message)) + + self._table.put_item(Item={self.primary_key: key, "History": current_messages}) + + def delete_messages(self, key: str) -> Optional[List[ChatMessage]]: + """Deletes the entire chat history for the given key (i.e. the row). + + Args: + key (str): The key specifying a row. + + Returns: + Optional[List[ChatMessage]]: The messages that were deleted. None if the + deletion failed. + """ + messages_to_delete = self.get_messages(key) + self._table.delete_item(Key={self.primary_key: key}) + return messages_to_delete + + def delete_message(self, key: str, idx: int) -> Optional[ChatMessage]: + """Deletes the message at the given index for the given key. + + Args: + key (str): The key specifying a row. + idx (int): The index of the message to delete. + + Returns: + Optional[ChatMessage]: The message that was deleted. None if the index + did not exist. + """ + current_messages = self.get_messages(key) + try: + message_to_delete = current_messages[idx] + del current_messages[idx] + self.set_messages(key, current_messages) + return message_to_delete + except IndexError: + logger.error( + f"No message exists at index {idx} for key {key}" + ) + return None + + def delete_last_message(self, key: str) -> Optional[ChatMessage]: + """Deletes the last message in the chat history for the given key. + + Args: + key (str): The key specifying a row. + + Returns: + Optional[ChatMessage]: The message that was deleted. None if the chat history + was empty.
+ """ + return self.delete_message(key, -1) + + def get_keys(self) -> List[str]: + """Retrieve all keys in the table. + + Returns: + List[str]: The keys in the table. + """ + response = self._table.scan(ProjectionExpression=self.primary_key) + keys = [item[self.primary_key] for item in response["Items"]] + while "LastEvaluatedKey" in response: + response = self._table.scan( + ProjectionExpression=self.primary_key, + ExclusiveStartKey=response["LastEvaluatedKey"], + ) + keys.extend([item[self.primary_key] for item in response["Items"]]) + return keys diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/pyproject.toml b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/pyproject.toml new file mode 100644 index 0000000000000..5b3daaf02b26d --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/pyproject.toml @@ -0,0 +1,65 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.storage.chat_store.dynamodb" + +[tool.llamahub.class_authors] +DynamoDBChatStore = "brycecf" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["brycecf <26725654+brycecf@users.noreply.github.com>"] +description = "llama-index storage-chat-store aws dynamodb integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-storage-chat-store-dynamodb" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +llama-index-core = "^0.11.0" +boto3 = "^1.35.13" +boto3-stubs = {extras = ["dynamodb"], version = "^1.35.14"} + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +moto = {extras = ["dynamodb"], version = "^5.0.14"} +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/BUILD b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/__init__.py b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/test_chat_store_dynamodb_chat_store.py 
b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/test_chat_store_dynamodb_chat_store.py new file mode 100644 index 0000000000000..579fab5a0833c --- /dev/null +++ b/llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-dynamodb/tests/test_chat_store_dynamodb_chat_store.py @@ -0,0 +1,89 @@ +import pytest +from moto import mock_aws +import boto3 +from llama_index.storage.chat_store.dynamodb.base import DynamoDBChatStore +from llama_index.core.base.llms.types import ChatMessage + + +@pytest.fixture() +def aws_credentials(): + """Mocked AWS Credentials for moto.""" + import os + + os.environ["AWS_ACCESS_KEY_ID"] = "testing" + os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" + os.environ["AWS_SECURITY_TOKEN"] = "testing" + os.environ["AWS_SESSION_TOKEN"] = "testing" + + +@pytest.fixture() +def dynamo_db(aws_credentials): + with mock_aws(): + yield boto3.resource("dynamodb", region_name="us-east-1") + + +@pytest.fixture() +def chat_store(dynamo_db): + dynamo_db.create_table( + TableName="TestTable", + KeySchema=[{"AttributeName": "SessionId", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "SessionId", "AttributeType": "S"}], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}, + ) + return DynamoDBChatStore(table_name="TestTable", region_name="us-east-1") + + +def test_set_get_messages(chat_store): + messages = [ChatMessage(content="Hello"), ChatMessage(content="World")] + chat_store.set_messages("TestSession", messages) + retrieved_messages = chat_store.get_messages("TestSession") + assert len(retrieved_messages) == 2 + assert retrieved_messages[0].content == "Hello" + assert retrieved_messages[1].content == "World" + + +def test_add_message(chat_store): + initial_message = ChatMessage(content="Initial") + chat_store.add_message("TestSession", initial_message) + added_message = ChatMessage(content="Added") + chat_store.add_message("TestSession", added_message) + messages = chat_store.get_messages("TestSession") + assert len(messages) == 2 + assert messages[1].content == "Added" + + +def test_delete_messages(chat_store): + messages = [ChatMessage(content="Hello"), ChatMessage(content="World")] + chat_store.set_messages("TestSession", messages) + deleted_messages = chat_store.delete_messages("TestSession") + assert len(deleted_messages) == 2 + assert deleted_messages[0].content == "Hello" + assert chat_store.get_messages("TestSession") == [] + + +def test_delete_message(chat_store): + messages = [ChatMessage(content="First"), ChatMessage(content="Second")] + chat_store.set_messages("TestSession", messages) + chat_store.delete_message("TestSession", 0) + remaining_messages = chat_store.get_messages("TestSession") + assert len(remaining_messages) == 1 + assert remaining_messages[0].content == "Second" + + +def test_delete_last_message(chat_store): + messages = [ChatMessage(content="First"), ChatMessage(content="Second")] + chat_store.set_messages("TestSession", messages) + last_message = chat_store.delete_last_message("TestSession") + assert last_message.content == "Second" + remaining_messages = chat_store.get_messages("TestSession") + assert len(remaining_messages) == 1 + assert remaining_messages[0].content == "First" + + +def test_get_keys(chat_store): + chat_store._table.put_item(Item={"SessionId": "1", "History": []}) + chat_store._table.put_item(Item={"SessionId": "2", "History": []}) + keys = chat_store.get_keys() + assert len(keys) == 2 + assert "1" in keys + assert "2" in keys diff --git 
a/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/llama_index/storage/kvstore/elasticsearch/base.py b/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/llama_index/storage/kvstore/elasticsearch/base.py index ccd682b57f268..6389e3a7eb8a8 100644 --- a/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/llama_index/storage/kvstore/elasticsearch/base.py +++ b/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/llama_index/storage/kvstore/elasticsearch/base.py @@ -9,7 +9,7 @@ import asyncio import nest_asyncio import elasticsearch -from elasticsearch.helpers import async_bulk +from elasticsearch.helpers import async_bulk, async_scan logger = getLogger(__name__) @@ -280,12 +280,11 @@ async def aget_all(self, collection: str = DEFAULT_COLLECTION) -> Dict[str, dict """ await self._create_index_if_not_exists(collection) - q = {"query": {"match_all": {}}} - response = await self._client.search(index=collection, body=q, source=True) result = {} - for r in response["hits"]["hits"]: - doc_id = r["_id"] - content = r["_source"] + q = {"query": {"match_all": {}}} + # async_scan pages through the scroll API, so every document is returned, + # not just the first page of search hits. + async for doc in async_scan(client=self._client, index=collection, query=q): + doc_id = doc["_id"] + content = doc["_source"] result[doc_id] = content return result diff --git a/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/pyproject.toml b/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/pyproject.toml index 656b296628407..d6287e7444be1 100644 --- a/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/pyproject.toml +++ b/llama-index-integrations/storage/kvstore/llama-index-storage-kvstore-elasticsearch/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-storage-kvstore-elasticsearch" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/tools/llama-index-tools-box/llama_index/tools/box/search/base.py b/llama-index-integrations/tools/llama-index-tools-box/llama_index/tools/box/search/base.py index 5cfe7d3d0c3cd..b29cdcd0aeb2b 100644 --- a/llama-index-integrations/tools/llama-index-tools-box/llama_index/tools/box/search/base.py +++ b/llama-index-integrations/tools/llama-index-tools-box/llama_index/tools/box/search/base.py @@ -58,6 +58,32 @@ class BoxSearchOptions: limit: Optional[int] = None offset: Optional[int] = None + def __init__( + self, + scope: Optional[SearchForContentScope] = None, + file_extensions: Optional[List[str]] = None, + created_at_range: Optional[List[str]] = None, + updated_at_range: Optional[List[str]] = None, + size_range: Optional[List[int]] = None, + owner_user_ids: Optional[List[str]] = None, + recent_updater_user_ids: Optional[List[str]] = None, + ancestor_folder_ids: Optional[List[str]] = None, + content_types: Optional[List[SearchForContentContentTypes]] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + ) -> None: + self.scope = scope + self.file_extensions = file_extensions + self.created_at_range = created_at_range + self.updated_at_range = updated_at_range + self.size_range = size_range + self.owner_user_ids = owner_user_ids + self.recent_updater_user_ids = recent_updater_user_ids + self.ancestor_folder_ids = ancestor_folder_ids + self.content_types = content_types + self.limit = limit + self.offset = offset + + class
BoxSearchToolSpec(BaseToolSpec): """ diff --git a/llama-index-integrations/tools/llama-index-tools-box/pyproject.toml b/llama-index-integrations/tools/llama-index-tools-box/pyproject.toml index 6f226d1cf64cb..bca3f8c6c89bd 100644 --- a/llama-index-integrations/tools/llama-index-tools-box/pyproject.toml +++ b/llama-index-integrations/tools/llama-index-tools-box/pyproject.toml @@ -34,7 +34,7 @@ license = "MIT" name = "llama-index-tools-box" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/tools/llama-index-tools-box/tests/test_tools_box_search.py b/llama-index-integrations/tools/llama-index-tools-box/tests/test_tools_box_search.py index 8f46cb31ef582..95e2f69b93f0c 100644 --- a/llama-index-integrations/tools/llama-index-tools-box/tests/test_tools_box_search.py +++ b/llama-index-integrations/tools/llama-index-tools-box/tests/test_tools_box_search.py @@ -19,6 +19,17 @@ def test_box_tool_search(box_client_ccg_integration_testing: BoxClient): assert len(docs) > 0 +def test_box_tool_search_options(box_client_ccg_integration_testing: BoxClient): + options = BoxSearchOptions(file_extensions=["pdf"]) + options.limit = 5 + + box_tool = BoxSearchToolSpec(box_client_ccg_integration_testing, options=options) + + query = "sample" + docs = box_tool.box_search(query=query) + assert len(docs) > 0 + + def test_box_tool_search_agent(box_client_ccg_integration_testing: BoxClient): test_data = get_testing_data() openai_api_key = test_data["openai_api_key"] diff --git a/llama-index-integrations/tools/llama-index-tools-duckduckgo/pyproject.toml b/llama-index-integrations/tools/llama-index-tools-duckduckgo/pyproject.toml index d7a08da5d58b2..19b0272c58756 100644 --- a/llama-index-integrations/tools/llama-index-tools-duckduckgo/pyproject.toml +++ b/llama-index-integrations/tools/llama-index-tools-duckduckgo/pyproject.toml @@ -29,11 +29,11 @@ license = "MIT" maintainers = ["leehuwuj"] name = "llama-index-tools-duckduckgo" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -llama-index = "^0.10.1" +llama-index = "^0.11.2" duckduckgo-search = "^6.1.0" [tool.poetry.group.dev.dependencies] diff --git a/llama-index-integrations/tools/llama-index-tools-exa/examples/exa.ipynb b/llama-index-integrations/tools/llama-index-tools-exa/examples/exa.ipynb index abae2d773774a..3ec503aecc68b 100644 --- a/llama-index-integrations/tools/llama-index-tools-exa/examples/exa.ipynb +++ b/llama-index-integrations/tools/llama-index-tools-exa/examples/exa.ipynb @@ -7,7 +7,7 @@ "source": [ "# Building a Exa Search Powered Data Agent\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This tutorial walks through using the LLM tools provided by the [Exa API](https://exa.ai) to allow LLMs to use semantic queries to search for and retrieve rich web content from the internet.\n", "\n", diff --git a/llama-index-integrations/tools/llama-index-tools-tavily-research/examples/tavily.ipynb b/llama-index-integrations/tools/llama-index-tools-tavily-research/examples/tavily.ipynb index 587088f27e472..d42ecb0286f8f 100644 --- a/llama-index-integrations/tools/llama-index-tools-tavily-research/examples/tavily.ipynb +++ b/llama-index-integrations/tools/llama-index-tools-tavily-research/examples/tavily.ipynb @@ -7,7 +7,7 @@ "source": [ "# Building a Tavily Data Agent\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This tutorial walks through using the LLM tools provided by the 
[Tavily API](https://app.tavily.com/) to allow LLMs to easily search and retrieve relevant content from the Internet.\n", "\n", diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/.gitignore b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/Makefile b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. 
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/README.md b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/README.md new file mode 100644 index 0000000000000..ae0f783f8498e --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/README.md @@ -0,0 +1,65 @@ +# Azure Cosmos DB for NoSQL Vector Store + +This integration makes it possible to use [Azure Cosmos DB for NoSQL](https://learn.microsoft.com/en-us/azure/cosmos-db/nosql/) +as a vector store in LlamaIndex. + +## Quick start + +Install the integration with: + +```sh +pip install llama-index-vector-stores-azurecosmosnosql +``` + +Create the CosmosDB client: + +```python +from azure.cosmos import CosmosClient, PartitionKey + +URI = "AZURE_COSMOSDB_URI" +KEY = "AZURE_COSMOSDB_KEY" +client = CosmosClient(URI, credential=KEY) +``` + +Specify the vector store properties: + +```python +indexing_policy = { + "indexingMode": "consistent", + "includedPaths": [{"path": "/*"}], + "excludedPaths": [{"path": '/"_etag"/?'}], + "vectorIndexes": [{"path": "/embedding", "type": "quantizedFlat"}], +} + +vector_embedding_policy = { + "vectorEmbeddings": [ + { + "path": "/embedding", + "dataType": "float32", + "distanceFunction": "cosine", + "dimensions": 3072, + } + ] +} +``` + +Create the vector store: + +```python +from llama_index.vector_stores.azurecosmosnosql import ( + AzureCosmosDBNoSqlVectorSearch, +) + +store = AzureCosmosDBNoSqlVectorSearch( + cosmos_client=client, + vector_embedding_policy=vector_embedding_policy, + indexing_policy=indexing_policy, + cosmos_container_properties={"partition_key": PartitionKey(path="/id")}, + cosmos_database_properties={}, + create_container=True, +) +``` + +Finally, create the index from a list containing documents: + +```python +from llama_index.core import StorageContext, VectorStoreIndex + +storage_context = StorageContext.from_defaults(vector_store=store) + +index = VectorStoreIndex.from_documents( + documents, storage_context=storage_context +) +```
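+ +As a quick sanity check, the store can also be queried directly. A minimal sketch (the query embedding below is a placeholder; in practice it would come from the same 3072-dimension embedding model used to build the index): + +```python +from llama_index.core.vector_stores.types import VectorStoreQuery + +result = store.query( + VectorStoreQuery(query_embedding=[0.1] * 3072, similarity_top_k=3) +) +for node, score in zip(result.nodes, result.similarities): + print(node.node_id, score) +``` diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/__init__.py new file mode 100644 index 0000000000000..74da620f48e9d --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/__init__.py @@ -0,0 +1,5 @@ +from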
llama_index.vector_stores.azurecosmosnosql.base import ( + AzureCosmosDBNoSqlVectorSearch, +) + +__all__ = ["AzureCosmosDBNoSqlVectorSearch"] diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/base.py new file mode 100644 index 0000000000000..7bb209946c723 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/llama_index/vector_stores/azurecosmosnosql/base.py @@ -0,0 +1,251 @@ +"""Azure CosmosDB NoSQL Vector store index. + +An index that is built on top of an existing vector store. + +""" +import logging +from typing import Any, Optional, Dict, cast, List + +from azure.cosmos import CosmosClient +from llama_index.core.bridge.pydantic import PrivateAttr +from llama_index.core.schema import BaseNode, MetadataMode +from llama_index.core.vector_stores.types import ( + BasePydanticVectorStore, + VectorStoreQuery, + VectorStoreQueryResult, +) +from llama_index.core.vector_stores.utils import ( + metadata_dict_to_node, + node_to_metadata_dict, +) + +logger = logging.getLogger(__name__) + + +class AzureCosmosDBNoSqlVectorSearch(BasePydanticVectorStore): + """Azure CosmosDB NoSQL Vector Store. + + To use, you should have: + - the ``azure-cosmos`` python package installed + - the store imported with ``from llama_index.vector_stores.azurecosmosnosql import AzureCosmosDBNoSqlVectorSearch`` + """ + + stores_text: bool = True + flat_metadata: bool = True + + _cosmos_client: Any = PrivateAttr() + _database_name: Any = PrivateAttr() + _container_name: Any = PrivateAttr() + _embedding_key: Any = PrivateAttr() + _vector_embedding_policy: Any = PrivateAttr() + _indexing_policy: Any = PrivateAttr() + _cosmos_container_properties: Any = PrivateAttr() + _cosmos_database_properties: Any = PrivateAttr() + _create_container: Any = PrivateAttr() + _database: Any = PrivateAttr() + _container: Any = PrivateAttr() + _id_key: Any = PrivateAttr() + _text_key: Any = PrivateAttr() + _metadata_key: Any = PrivateAttr() + + def __init__( + self, + cosmos_client: CosmosClient, + vector_embedding_policy: Dict[str, Any], + indexing_policy: Dict[str, Any], + cosmos_container_properties: Dict[str, Any], + cosmos_database_properties: Optional[Dict[str, Any]] = None, + database_name: str = "vectorSearchDB", + container_name: str = "vectorSearchContainer", + create_container: bool = True, + id_key: str = "id", + text_key: str = "text", + metadata_key: str = "metadata", + **kwargs: Any, + ) -> None: + """Initialize the vector store. + + Args: + cosmos_client: Client used to connect to the Azure CosmosDB NoSQL account. + database_name: Name of the database, created if it does not exist. + container_name: Name of the container, created if it does not exist. + vector_embedding_policy: Vector Embedding Policy for the container. + indexing_policy: Indexing Policy for the container. + cosmos_container_properties: Container Properties for the container. + cosmos_database_properties: Database Properties for the database. + """ + super().__init__() + + if cosmos_client is not None: + self._cosmos_client = cast(CosmosClient, cosmos_client) + + if create_container: + if ( + indexing_policy["vectorIndexes"] is None + or len(indexing_policy["vectorIndexes"]) == 0 + ): + raise ValueError( + "vectorIndexes cannot be null or empty in the indexing_policy."
+ ) + if ( + vector_embedding_policy is None + or len(vector_embedding_policy["vectorEmbeddings"]) == 0 + ): + raise ValueError( + "vectorEmbeddings cannot be null " + "or empty in the vector_embedding_policy." + ) + if ( + cosmos_container_properties is None + or cosmos_container_properties["partition_key"] is None + ): + raise ValueError( + "partition_key cannot be null " "or empty for a container." + ) + + self._database_name = database_name + self._container_name = container_name + self._vector_embedding_policy = vector_embedding_policy + self._indexing_policy = indexing_policy + self._cosmos_container_properties = cosmos_container_properties + self._cosmos_database_properties = cosmos_database_properties + self._id_key = id_key + self._text_key = text_key + self._metadata_key = metadata_key + self._embedding_key = self._vector_embedding_policy["vectorEmbeddings"][0][ + "path" + ][1:] + + self._database = self._cosmos_client.create_database_if_not_exists( + id=self._database_name, + offer_throughput=self._cosmos_database_properties.get("offer_throughput"), + session_token=self._cosmos_database_properties.get("session_token"), + initial_headers=self._cosmos_database_properties.get("initial_headers"), + etag=self._cosmos_database_properties.get("etag"), + match_condition=self._cosmos_database_properties.get("match_condition"), + ) + + # Create the container if it doesn't already exist + self._container = self._database.create_container_if_not_exists( + id=self._container_name, + partition_key=self._cosmos_container_properties["partition_key"], + indexing_policy=self._indexing_policy, + default_ttl=self._cosmos_container_properties.get("default_ttl"), + offer_throughput=self._cosmos_container_properties.get("offer_throughput"), + unique_key_policy=self._cosmos_container_properties.get( + "unique_key_policy" + ), + conflict_resolution_policy=self._cosmos_container_properties.get( + "conflict_resolution_policy" + ), + analytical_storage_ttl=self._cosmos_container_properties.get( + "analytical_storage_ttl" + ), + computed_properties=self._cosmos_container_properties.get( + "computed_properties" + ), + etag=self._cosmos_container_properties.get("etag"), + match_condition=self._cosmos_container_properties.get("match_condition"), + session_token=self._cosmos_container_properties.get("session_token"), + initial_headers=self._cosmos_container_properties.get("initial_headers"), + vector_embedding_policy=self._vector_embedding_policy, + ) + + def add( + self, + nodes: List[BaseNode], + **add_kwargs: Any, + ) -> List[str]: + """Add nodes to index. + + Args: + nodes: List[BaseNode]: list of nodes with embeddings + + Returns: + A List of ids for successfully added nodes. + + """ + ids = [] + data_to_insert = [] + + if not nodes: + raise Exception("Nodes cannot be null or empty") + + for node in nodes: + metadata = node_to_metadata_dict( + node, remove_text=True, flat_metadata=self.flat_metadata + ) + + entry = { + self._id_key: node.node_id, + self._embedding_key: node.get_embedding(), + self._text_key: node.get_content(metadata_mode=MetadataMode.NONE) or "", + self._metadata_key: metadata, + } + data_to_insert.append(entry) + ids.append(node.node_id) + + for item in data_to_insert: + self._container.upsert_item(item) + + return ids + + def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None: + """ + Delete nodes with the given ref_doc_id. + + Args: + ref_doc_id (str): The doc_id of the document to delete.
+ + """ + self._container.delete_item(ref_doc_id, partition_key=ref_doc_id) + + @property + def client(self) -> Any: + """Return CosmosDB client.""" + return self._cosmos_client + + def _query(self, query: VectorStoreQuery) -> VectorStoreQueryResult: + params: Dict[str, Any] = { + "vector": query.query_embedding, + "path": self._embedding_key, + "k": query.similarity_top_k, + } + + top_k_nodes = [] + top_k_ids = [] + top_k_scores = [] + + for item in self._container.query_items( + query="SELECT TOP @k c.id, c.embedding, c.text, c.metadata, VectorDistance(c.embedding,@embedding) AS SimilarityScore FROM c ORDER BY VectorDistance(c.embedding,@embedding)", + parameters=[ + {"name": "@k", "value": params["k"]}, + {"name": "@embedding", "value": params["vector"]}, + ], + enable_cross_partition_query=True, + ): + node = metadata_dict_to_node(item[self._metadata_key]) + node.set_content(item[self._text_key]) + + node_id = item[self._id_key] + node_score = item["SimilarityScore"] + + top_k_ids.append(node_id) + top_k_nodes.append(node) + top_k_scores.append(node_score) + + return VectorStoreQueryResult( + nodes=top_k_nodes, similarities=top_k_scores, ids=top_k_ids + ) + + def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult: + """Query index for top k most similar nodes. + + Args: + query: a VectorStoreQuery object. + + Returns: + A VectorStoreQueryResult containing the results of the query. + """ + return self._query(query) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/pyproject.toml new file mode 100644 index 0000000000000..2d830de9de68b --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.vector_stores.azurecosmosnosql" + +[tool.llamahub.class_authors] +AzureCosmosDBMongoDBVectorSearch = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["Your Name "] +description = "llama-index vector_stores azurecosmosnosql integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-vector-stores-azurecosmosnosql" +readme = "README.md" +version = "1.0.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +llama-index-core = "^0.10.1" +azure-cosmos = "^4.7.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/BUILD 
b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/BUILD new file mode 100644 index 0000000000000..e09d64a6175b7 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/BUILD @@ -0,0 +1,3 @@ +python_tests( + name="tests0", +) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/test_azurecosmosnosql.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/test_azurecosmosnosql.py new file mode 100644 index 0000000000000..2956e7fce7afa --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/test_azurecosmosnosql.py @@ -0,0 +1,174 @@ +"""Test Azure CosmosDB NoSql Vector Search functionality.""" + +from __future__ import annotations + +from time import sleep +from typing import List + +import pytest + +try: + from azure.cosmos import CosmosClient, PartitionKey + + URL = "AZURE_COSMOSDB_URI" + KEY = "AZURE_COSMOSDB_KEY" + database_name = "test_database" + container_name = "test_container" + test_client = CosmosClient(URL, credential=KEY) + + indexing_policy = { + "indexingMode": "consistent", + "includedPaths": [{"path": "/*"}], + "excludedPaths": [{"path": '/"_etag"/?'}], + "vectorIndexes": [{"path": "/embedding", "type": "quantizedFlat"}], + } + + vector_embedding_policy = { + "vectorEmbeddings": [ + { + "path": "/embedding", + "dataType": "float32", + "distanceFunction": "cosine", + "dimensions": 1536, + } + ] + } + + partition_key = PartitionKey(path="/id") + cosmos_container_properties_test = {"partition_key": partition_key} + cosmos_database_properties_test = {} + + test_database = test_client.create_database_if_not_exists(id=database_name) + test_container = test_database.create_container_if_not_exists( + id=container_name, + partition_key=partition_key, + indexing_policy=indexing_policy, + vector_embedding_policy=vector_embedding_policy, + ) + + cosmosnosql_available = True +except (ImportError, Exception): + cosmosnosql_available = False + +from llama_index.core.schema import NodeRelationship, RelatedNodeInfo, TextNode +from llama_index.core.vector_stores.types import VectorStoreQuery +from llama_index.vector_stores.azurecosmosnosql import AzureCosmosDBNoSqlVectorSearch + + +@pytest.fixture(scope="session") +def node_embeddings() -> list[TextNode]: + return [ + TextNode( + text="lorem ipsum", + id_="c330d77f-90bd-4c51-9ed2-57d8d693b3b0", + relationships={NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test-0")}, + metadata={ + "author": "Stephen King", + "theme": "Friendship", + }, + embedding=[1.0, 0.0, 0.0], + ), + TextNode( + text="lorem ipsum", + id_="c3d1e1dd-8fb4-4b8f-b7ea-7fa96038d39d", + relationships={NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test-1")}, + metadata={ + "director": "Francis Ford Coppola", + "theme": "Mafia", + }, + embedding=[0.0, 1.0, 0.0], + ), + TextNode( + text="lorem ipsum", + id_="c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d", + relationships={NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test-2")}, + metadata={ + "director": "Christopher Nolan", + }, + embedding=[0.0, 0.0, 1.0], + ), + ] + + +@pytest.mark.skipif(not cosmosnosql_available, reason="cosmos client 
is not available") +class TestAzureCosmosNoSqlVectorSearch: + @classmethod + def setup_class(cls) -> None: + # insure the test container is empty + items_list = test_container.read_all_items() + first_item = next(iter(items_list), None) # type: ignore[index] + assert first_item is None + + @classmethod + def teardown_class(cls) -> None: + # delete all the items in the container + for item in test_container.query_items( + query="SELECT * FROM c", enable_cross_partition_query=True + ): + test_container.delete_item(item, partition_key=item["id"]) + + @pytest.fixture(autouse=True) + def setup(self) -> None: + # delete all the items in the container + for item in test_container.query_items( + query="SELECT * FROM c", enable_cross_partition_query=True + ): + test_container.delete_item(item, partition_key=item["id"]) + + def test_add_and_delete(self) -> None: + vector_store = AzureCosmosDBNoSqlVectorSearch( + cosmos_client=test_client, + vector_embedding_policy=vector_embedding_policy, + indexing_policy=indexing_policy, + database_name=database_name, + container_name=container_name, + cosmos_database_properties=cosmos_database_properties_test, + cosmos_container_properties=cosmos_container_properties_test, + ) + sleep(1) # waits for azure cosmos nosql to update + vector_store.add( + [ + TextNode( + text="test node text", + id_="test node id", + relationships={ + NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test doc id") + }, + embedding=[0.5, 0.5, 0.5], + ) + ] + ) + + items_amount = 0 + items_list = test_container.read_all_items() + for item in items_list: + items_amount += 1 + + assert items_amount == 1 + + vector_store.delete("test node id") + + items_amount = 0 + items_list = test_container.read_all_items() + for item in items_list: + items_amount += 1 + + assert items_amount == 0 + + def test_query(self, node_embeddings: List[TextNode]) -> None: + vector_store = AzureCosmosDBNoSqlVectorSearch( + cosmos_client=test_client, + vector_embedding_policy=vector_embedding_policy, + indexing_policy=indexing_policy, + cosmos_database_properties=cosmos_database_properties_test, + cosmos_container_properties=cosmos_container_properties_test, + ) + vector_store.add(node_embeddings) # type: ignore + sleep(1) # wait for azure cosmodb nosql to update the index + + res = vector_store.query( + VectorStoreQuery(query_embedding=[1.0, 0.0, 0.0], similarity_top_k=1) + ) + print("res:\n", res) + assert res.nodes + assert res.nodes[0].get_content() == "lorem ipsum" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/test_vector_stores_azurecosmosnosql.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/test_vector_stores_azurecosmosnosql.py new file mode 100644 index 0000000000000..3d7d5305814d0 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-azurecosmosnosql/tests/test_vector_stores_azurecosmosnosql.py @@ -0,0 +1,7 @@ +from llama_index.core.vector_stores.types import BasePydanticVectorStore +from llama_index.vector_stores.azurecosmosnosql import AzureCosmosDBNoSqlVectorSearch + + +def test_class(): + names_of_base_classes = [b.__name__ for b in AzureCosmosDBNoSqlVectorSearch.__mro__] + assert BasePydanticVectorStore.__name__ in names_of_base_classes diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/llama_index/vector_stores/kdbai/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/llama_index/vector_stores/kdbai/base.py 
index f4217000be3ff..9560874a88ad0 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/llama_index/vector_stores/kdbai/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/llama_index/vector_stores/kdbai/base.py @@ -137,6 +137,13 @@ def add( elif isinstance(self._table, kdbai.TablePyKx): schema = [item for item in schema if item != "sparseVectors"] + # Deduplicate columns by name to handle the double-columns issue from the backend (occurs only when the schema has sparseVectors). + updated_schema = {} + for column in schema: + if column["name"] not in updated_schema: + updated_schema[column["name"]] = column + schema = list(updated_schema.values()) + try: for node in nodes: doc = { diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/pyproject.toml index df1e4d78a1877..692c8d81a2802 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-kdbai/pyproject.toml @@ -30,7 +30,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-kdbai" readme = "README.md" -version = "0.3.1" +version = "0.3.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/.gitignore b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g.
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/Makefile b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. + sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/README.md b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/README.md new file mode 100644 index 0000000000000..9b0be2972d336 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/README.md @@ -0,0 +1,46 @@ +# LlamaIndex Vector_Stores Integration: MariaDB + +With the release of MariaDB 11.6 Vector Preview, the MariaDB relational database introduced the long-awaited vector search functionality. +MariaDB can now be used as a fully functional vector store in LlamaIndex. +Please note, however, that this MariaDB version is only an alpha release, and it may crash unexpectedly. + +To learn more about the feature, check the [Vector Overview](https://mariadb.com/kb/en/vector-overview/) in the MariaDB docs. + +## Installation + +```shell +pip install llama-index-vector-stores-mariadb +``` + +## Usage + +```python +from llama_index.vector_stores.mariadb import MariaDBVectorStore + +vector_store = MariaDBVectorStore.from_params( + host="localhost", + port=3306, + user="llamaindex", + password="password", + database="vectordb", + table_name="llama_index_vectorstore", + embed_dim=1536, # OpenAI embedding dimension +) +```
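+ +The store then plugs into the standard LlamaIndex indexing flow. A minimal sketch, assuming an embedding model is configured (the default OpenAI embeddings match `embed_dim=1536`); the sample document is hypothetical: + +```python +from llama_index.core import Document, StorageContext, VectorStoreIndex + +documents = [Document(text="MariaDB 11.6 introduces vector search.")] + +storage_context = StorageContext.from_defaults(vector_store=vector_store) +index = VectorStoreIndex.from_documents( + documents, storage_context=storage_context +) + +retriever = index.as_retriever(similarity_top_k=2) +nodes = retriever.retrieve("vector search in MariaDB") +``` + +## Development + +### Running Integration Tests + +A suite of integration tests is available to verify the MariaDB vector store integration.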
+The test suite needs a running MariaDB database with vector search support; if one is not found, the tests are skipped. +To facilitate that, a sample `docker-compose.yaml` file is provided, so you can simply do: + +```shell +docker compose -f tests/docker-compose.yaml up + +pytest -v + +# Clean up when you finish testing +docker compose -f tests/docker-compose.yaml down +``` diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/BUILD new file mode 100644 index 0000000000000..0d583429dff79 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/BUILD @@ -0,0 +1,4 @@ +resource( + name="py_typed", + source="py.typed", +) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/py.typed b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/__init__.py new file mode 100644 index 0000000000000..2f2836747523e --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/__init__.py @@ -0,0 +1,3 @@ +from llama_index.vector_stores.mariadb.base import MariaDBVectorStore + +__all__ = ["MariaDBVectorStore"] diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/base.py new file mode 100644 index 0000000000000..532a7197087b1 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/llama_index/vector_stores/mariadb/base.py @@ -0,0 +1,445 @@ +"""MariaDB Vector Store.""" + +import json +import logging +from typing import Any, Dict, List, NamedTuple, Optional, Union +from urllib.parse import quote_plus + +import sqlalchemy + +from llama_index.core.bridge.pydantic import PrivateAttr +from llama_index.core.schema import BaseNode, MetadataMode +from llama_index.core.vector_stores.types import ( + BasePydanticVectorStore, + FilterCondition, + FilterOperator, + MetadataFilter, + MetadataFilters, + VectorStoreQuery, + VectorStoreQueryMode, + VectorStoreQueryResult, +) +from llama_index.core.vector_stores.utils import ( + metadata_dict_to_node, + node_to_metadata_dict, +) + + +class DBEmbeddingRow(NamedTuple): + node_id: str + text: str + metadata: dict + similarity: float + + +_logger = logging.getLogger(__name__) + + +class MariaDBVectorStore(BasePydanticVectorStore): + """MariaDB Vector Store.
+ + Examples: + `pip install llama-index-vector-stores-mariadb` + + ```python + from llama_index.vector_stores.mariadb import MariaDBVectorStore + + # Create MariaDBVectorStore instance + vector_store = MariaDBVectorStore.from_params( + host="localhost", + port=3306, + user="llamaindex", + password="password", + database="vectordb", + table_name="llama_index_vectorstore", + embed_dim=1536 # OpenAI embedding dimension + ) + ``` + """ + + stores_text: bool = True + flat_metadata: bool = False + + connection_string: str + connection_args: Dict[str, Any] + table_name: str + schema_name: str + embed_dim: int + perform_setup: bool + debug: bool + + _engine: Any = PrivateAttr() + _is_initialized: bool = PrivateAttr(default=False) + + def __init__( + self, + connection_string: Union[str, sqlalchemy.engine.URL], + connection_args: Dict[str, Any], + table_name: str, + schema_name: str, + embed_dim: int = 1536, + perform_setup: bool = True, + debug: bool = False, + ) -> None: + """Constructor. + + Args: + connection_string (Union[str, sqlalchemy.engine.URL]): Connection string for the MariaDB server. + connection_args (Dict[str, Any]): A dictionary of connection options. + table_name (str): Table name. + schema_name (str): Schema name. + embed_dim (int, optional): Embedding dimensions. Defaults to 1536. + perform_setup (bool, optional): If DB should be set up. Defaults to True. + debug (bool, optional): Debug mode. Defaults to False. + """ + super().__init__( + connection_string=connection_string, + connection_args=connection_args, + table_name=table_name, + schema_name=schema_name, + embed_dim=embed_dim, + perform_setup=perform_setup, + debug=debug, + ) + + def close(self) -> None: + if not self._is_initialized: + return + + self._engine.dispose() + + @classmethod + def class_name(cls) -> str: + return "MariaDBVectorStore" + + @classmethod + def from_params( + cls, + host: Optional[str] = None, + port: Optional[str] = None, + database: Optional[str] = None, + user: Optional[str] = None, + password: Optional[str] = None, + table_name: str = "llamaindex", + schema_name: str = "public", + connection_string: Optional[Union[str, sqlalchemy.engine.URL]] = None, + connection_args: Optional[Dict[str, Any]] = None, + embed_dim: int = 1536, + perform_setup: bool = True, + debug: bool = False, + ) -> "MariaDBVectorStore": + """Construct from params. + + Args: + host (Optional[str], optional): Host of MariaDB connection. Defaults to None. + port (Optional[str], optional): Port of MariaDB connection. Defaults to None. + database (Optional[str], optional): MariaDB DB name. Defaults to None. + user (Optional[str], optional): MariaDB username. Defaults to None. + password (Optional[str], optional): MariaDB password. Defaults to None. + table_name (str): Table name. Defaults to "llamaindex". + schema_name (str): Schema name. Defaults to "public". + connection_string (Union[str, sqlalchemy.engine.URL]): Connection string to MariaDB DB. + connection_args (Dict[str, Any], optional): A dictionary of connection options. + embed_dim (int, optional): Embedding dimensions. Defaults to 1536. + perform_setup (bool, optional): If DB should be set up. Defaults to True. + debug (bool, optional): Debug mode. Defaults to False. + + Returns: + MariaDBVectorStore: Instance of MariaDBVectorStore constructed from params. 
+ """ + conn_str = ( + connection_string + or f"mysql+pymysql://{user}:{quote_plus(password)}@{host}:{port}/{database}" + ) + conn_args = connection_args or { + "ssl": {"ssl_mode": "PREFERRED"}, + "read_timeout": 30, + } + + return cls( + connection_string=conn_str, + connection_args=conn_args, + table_name=table_name, + schema_name=schema_name, + embed_dim=embed_dim, + perform_setup=perform_setup, + debug=debug, + ) + + @property + def client(self) -> Any: + if not self._is_initialized: + return None + return self._engine + + def _connect(self) -> Any: + self._engine = sqlalchemy.create_engine( + self.connection_string, connect_args=self.connection_args, echo=self.debug + ) + + def _create_table_if_not_exists(self) -> None: + with self._engine.connect() as connection: + stmt = f""" + CREATE TABLE IF NOT EXISTS `{self.table_name}` ( + id SERIAL PRIMARY KEY, + node_id VARCHAR(255) NOT NULL, + text TEXT, + metadata JSON, + embedding BLOB NOT NULL, + VECTOR INDEX (embedding) + ); + """ + connection.execute(sqlalchemy.text(stmt)) + + connection.commit() + + def _initialize(self) -> None: + if not self._is_initialized: + self._connect() + if self.perform_setup: + self._create_table_if_not_exists() + self._is_initialized = True + + def get_nodes( + self, + node_ids: Optional[List[str]] = None, + filters: Optional[MetadataFilters] = None, + ) -> List[BaseNode]: + """Get nodes from vector store.""" + self._initialize() + + stmt = f"""SELECT text, metadata FROM `{self.table_name}` WHERE node_id IN :node_ids""" + + with self._engine.connect() as connection: + result = connection.execute(sqlalchemy.text(stmt), {"node_ids": node_ids}) + + nodes: List[BaseNode] = [] + for item in result: + node = metadata_dict_to_node(json.loads(item.metadata)) + node.set_content(str(item.text)) + nodes.append(node) + + return nodes + + def _node_to_table_row(self, node: BaseNode) -> Dict[str, Any]: + return { + "node_id": node.node_id, + "text": node.get_content(metadata_mode=MetadataMode.NONE), + "embedding": node.get_embedding(), + "metadata": node_to_metadata_dict( + node, + remove_text=True, + flat_metadata=self.flat_metadata, + ), + } + + def add( + self, + nodes: List[BaseNode], + **add_kwargs: Any, + ) -> List[str]: + self._initialize() + + ids = [] + with self._engine.connect() as connection: + for node in nodes: + ids.append(node.node_id) + item = self._node_to_table_row(node) + stmt = sqlalchemy.text( + f""" + INSERT INTO `{self.table_name}` (node_id, text, embedding, metadata) + VALUES ( + :node_id, + :text, + vec_fromtext(:embedding), + :metadata + ) + """ + ) + connection.execute( + stmt, + { + "node_id": item["node_id"], + "text": item["text"], + "embedding": json.dumps(item["embedding"]), + "metadata": json.dumps(item["metadata"]), + }, + ) + + connection.commit() + + return ids + + def _to_mariadb_operator(self, operator: FilterOperator) -> str: + if operator == FilterOperator.EQ: + return "=" + elif operator == FilterOperator.GT: + return ">" + elif operator == FilterOperator.LT: + return "<" + elif operator == FilterOperator.NE: + return "!=" + elif operator == FilterOperator.GTE: + return ">=" + elif operator == FilterOperator.LTE: + return "<=" + elif operator == FilterOperator.IN: + return "IN" + elif operator == FilterOperator.NIN: + return "NOT IN" + else: + _logger.warning("Unsupported operator: %s, fallback to '='", operator) + return "=" + + def _build_filter_clause(self, filter_: MetadataFilter) -> str: + filter_value = filter_.value + if filter_.operator in [FilterOperator.IN, 
FilterOperator.NIN]: + values = [] + for v in filter_.value: + # Quote string values; render non-strings (e.g. numbers) literally. + if isinstance(v, str): + value = f"'{v}'" + else: + value = str(v) + + values.append(value) + filter_value = ", ".join(values) + filter_value = f"({filter_value})" + elif isinstance(filter_.value, str): + filter_value = f"'{filter_.value}'" + + return f"JSON_VALUE(metadata, '$.{filter_.key}') {self._to_mariadb_operator(filter_.operator)} {filter_value}" + + def _filters_to_where_clause(self, filters: MetadataFilters) -> str: + conditions = { + FilterCondition.OR: "OR", + FilterCondition.AND: "AND", + } + if filters.condition not in conditions: + raise ValueError( + f"Unsupported condition: {filters.condition}. " + f"Must be one of {list(conditions.keys())}" + ) + + clauses: List[str] = [] + for filter_ in filters.filters: + if isinstance(filter_, MetadataFilter): + clauses.append(self._build_filter_clause(filter_)) + continue + + if isinstance(filter_, MetadataFilters): + subfilters = self._filters_to_where_clause(filter_) + if subfilters: + clauses.append(f"({subfilters})") + continue + + raise ValueError( + f"Unsupported filter type: {type(filter_)}. Must be one of {MetadataFilter}, {MetadataFilters}" + ) + return f" {conditions[filters.condition]} ".join(clauses) + + def _db_rows_to_query_result( + self, rows: List[DBEmbeddingRow] + ) -> VectorStoreQueryResult: + nodes = [] + similarities = [] + ids = [] + for db_embedding_row in rows: + node = metadata_dict_to_node(db_embedding_row.metadata) + node.set_content(str(db_embedding_row.text)) + + similarities.append(db_embedding_row.similarity) + ids.append(db_embedding_row.node_id) + nodes.append(node) + + return VectorStoreQueryResult( + nodes=nodes, + similarities=similarities, + ids=ids, + ) + + def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult: + if query.mode != VectorStoreQueryMode.DEFAULT: + raise NotImplementedError(f"Query mode {query.mode} not available.") + + self._initialize() + + stmt = f""" + SELECT + node_id, + text, + embedding, + metadata, + vec_distance(embedding, vec_fromtext('{query.query_embedding}')) AS distance + FROM `{self.table_name}` + ORDER BY distance + LIMIT {query.similarity_top_k} + """ + + if query.filters: + where = self._filters_to_where_clause(query.filters) + + # We cannot use the query above when there is a WHERE clause, + # because of a bug in MariaDB: https://jira.mariadb.org/browse/MDEV-34774. + # The following query works around it. + stmt = f""" + SELECT * FROM ( + SELECT + node_id, + text, + embedding, + metadata, + vec_distance(embedding, vec_fromtext('{query.query_embedding}')) AS distance + FROM `{self.table_name}` + WHERE {where} + LIMIT 1000000 + ) AS unordered + ORDER BY distance + LIMIT {query.similarity_top_k} + """ + + with self._engine.connect() as connection: + result = connection.execute(sqlalchemy.text(stmt)) + + results = [] + for item in result: + results.append( + DBEmbeddingRow( + node_id=item.node_id, + text=item.text, + metadata=json.loads(item.metadata), + similarity=(1 - item.distance) if item.distance is not None else 0, + ) + ) + + return self._db_rows_to_query_result(results) + + def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None: + self._initialize() + + with self._engine.connect() as connection: + # Should we create an index on ref_doc_id?
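+ # (JSON_EXTRACT in the WHERE clause below cannot use a plain index; + # for large tables, an indexed generated column over '$.ref_doc_id' + # would be the likely optimization.)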
+ stmt = f"""DELETE FROM `{self.table_name}` WHERE JSON_EXTRACT(metadata, '$.ref_doc_id') = :doc_id""" + connection.execute(sqlalchemy.text(stmt), {"doc_id": ref_doc_id}) + + connection.commit() + + def delete_nodes( + self, + node_ids: Optional[List[str]] = None, + filters: Optional[MetadataFilters] = None, + **delete_kwargs: Any, + ) -> None: + self._initialize() + + with self._engine.connect() as connection: + stmt = f"""DELETE FROM `{self.table_name}` WHERE node_id IN :node_ids""" + connection.execute(sqlalchemy.text(stmt), {"node_ids": node_ids}) + + connection.commit() + + def clear(self) -> None: + self._initialize() + + with self._engine.connect() as connection: + stmt = f"""DELETE FROM `{self.table_name}`""" + connection.execute(sqlalchemy.text(stmt)) + + connection.commit() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/pyproject.toml new file mode 100644 index 0000000000000..9d68fd17dfe0b --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/pyproject.toml @@ -0,0 +1,71 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.vector_stores.mariadb" + +[tool.llamahub.class_authors] +MariaDBVectorStore = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["Your Name "] +description = "llama-index vector_stores mariadb integration" +exclude = ["**/BUILD"] +license = "MIT" +name = "llama-index-vector-stores-mariadb" +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +llama-index-core = "^0.11.0" +pymysql = "^1.1.1" + +[tool.poetry.dependencies.sqlalchemy] +version = "~2.0" + +[tool.poetry.group.dev.dependencies] +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" + +[tool.poetry.group.dev.dependencies.black] +extras = ["jupyter"] +version = "<=23.9.1,>=23.7.0" + +[tool.poetry.group.dev.dependencies.codespell] +extras = ["toml"] +version = ">=v2.2.6" + +[[tool.poetry.packages]] +include = "llama_index/" + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::DeprecationWarning:", +] diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git 
a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/docker-compose.yaml b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/docker-compose.yaml new file mode 100644 index 0000000000000..db287a5b3f5dc --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/docker-compose.yaml @@ -0,0 +1,8 @@ +services: + mariadb: + image: "quay.io/mariadb-foundation/mariadb-devel:11.6-vector-preview" + environment: + MARIADB_DATABASE: test + MARIADB_ROOT_PASSWORD: test + ports: + - "3306:3306" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/test_mariadb.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/test_mariadb.py new file mode 100644 index 0000000000000..4d337649126a6 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/test_mariadb.py @@ -0,0 +1,202 @@ +"""Integration tests for llama-index-vector-stores-mariadb.""" + +from typing import Generator, List + +import pytest +import sqlalchemy + +from llama_index.core.schema import NodeRelationship, RelatedNodeInfo, TextNode +from llama_index.core.vector_stores.types import ( + FilterCondition, + FilterOperator, + MetadataFilter, + MetadataFilters, + VectorStoreQuery, +) +from llama_index.vector_stores.mariadb import MariaDBVectorStore + +TEST_NODES: List[TextNode] = [ + TextNode( + text="lorem ipsum", + id_="c330d77f-90bd-4c51-9ed2-57d8d693b3b0", + relationships={NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test-0")}, + metadata={ + "author": "Stephen King", + "theme": "Friendship", + "pages": 1000, + }, + embedding=[1.0, 0.0, 0.0], + ), + TextNode( + text="dolor sit amet", + id_="c3d1e1dd-8fb4-4b8f-b7ea-7fa96038d39d", + relationships={NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test-1")}, + metadata={ + "director": "Francis Ford Coppola", + "theme": "Mafia", + }, + embedding=[0.0, 1.0, 0.0], + ), + TextNode( + text="consectetur adipiscing elit", + id_="c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d", + relationships={NodeRelationship.SOURCE: RelatedNodeInfo(node_id="test-2")}, + metadata={ + "director": "Christopher Nolan", + }, + embedding=[0.0, 0.0, 1.0], + ), +] + +vector_store = MariaDBVectorStore.from_params( + database="test", + table_name="vector_store_test", + host="127.0.0.1", + user="root", + password="test", + port="3306", +) + + +try: + # If you want to run the integration tests you need to do: + # docker-compose up + + # Check if we are able to connect to the MariaDB instance + engine: sqlalchemy.Engine = sqlalchemy.create_engine( + vector_store.connection_string, connect_args=vector_store.connection_args + ) + engine.connect() + engine.dispose() + + run_integration_tests = True +except Exception: + run_integration_tests = False + + +@pytest.fixture(autouse=True) +def teardown() -> Generator: + """Clear the store after a test completion.""" + yield + + vector_store.clear() + + +@pytest.fixture(scope="session", autouse=True) +def close_db_connection() -> Generator: + """Close the DB connections after the last test.""" + yield + + vector_store.close() + + +@pytest.mark.skipif( + run_integration_tests is False, + reason="MariaDB instance required for integration tests", +) +def test_query() -> None: + vector_store.add(TEST_NODES) + res = vector_store.query( + VectorStoreQuery(query_embedding=[1.0, 0.0, 0.0], similarity_top_k=1) + ) + assert res.nodes + assert len(res.nodes) == 1 + assert res.nodes[0].get_content() == "lorem 
ipsum" + + +@pytest.mark.skipif( + run_integration_tests is False, + reason="MariaDB instance required for integration tests", +) +def test_query_with_metadatafilters() -> None: + filters = MetadataFilters( + filters=[ + MetadataFilter( + key="director", + value=["Francis Ford Coppola", "Christopher Nolan"], + operator=FilterOperator.IN, + ), + MetadataFilters( + filters=[ + MetadataFilter( + key="theme", value="Mafia", operator=FilterOperator.EQ + ), + MetadataFilter(key="pages", value=1000, operator=FilterOperator.EQ), + ], + condition=FilterCondition.OR, + ), + ], + condition=FilterCondition.AND, + ) + + vector_store.add(TEST_NODES) + res = vector_store.query( + VectorStoreQuery( + query_embedding=[1.0, 0.0, 0.0], filters=filters, similarity_top_k=3 + ) + ) + + assert res.nodes + assert len(res.nodes) == 1 + assert res.nodes[0].get_content() == "dolor sit amet" + + +@pytest.mark.skipif( + run_integration_tests is False, + reason="MariaDB instance required for integration tests", +) +def test_delete() -> None: + vector_store.add(TEST_NODES) + vector_store.delete("test-0") + vector_store.delete("test-1") + res = vector_store.get_nodes( + node_ids=[ + "c330d77f-90bd-4c51-9ed2-57d8d693b3b0", + "c3d1e1dd-8fb4-4b8f-b7ea-7fa96038d39d", + "c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d", + ] + ) + assert len(res) == 1 + assert res[0].get_content() == "consectetur adipiscing elit" + assert res[0].id_ == "c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d" + + +@pytest.mark.skipif( + run_integration_tests is False, + reason="MariaDB instance required for integration tests", +) +def test_delete_nodes() -> None: + vector_store.add(TEST_NODES) + vector_store.delete_nodes( + node_ids=[ + "c330d77f-90bd-4c51-9ed2-57d8d693b3b0", + "c3d1e1dd-8fb4-4b8f-b7ea-7fa96038d39d", + ] + ) + res = vector_store.get_nodes( + node_ids=[ + "c330d77f-90bd-4c51-9ed2-57d8d693b3b0", + "c3d1e1dd-8fb4-4b8f-b7ea-7fa96038d39d", + "c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d", + ] + ) + assert len(res) == 1 + assert res[0].get_content() == "consectetur adipiscing elit" + assert res[0].id_ == "c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d" + + +@pytest.mark.skipif( + run_integration_tests is False, + reason="MariaDB instance required for integration tests", +) +def test_clear() -> None: + vector_store.add(TEST_NODES) + vector_store.clear() + res = vector_store.get_nodes( + node_ids=[ + "c330d77f-90bd-4c51-9ed2-57d8d693b3b0", + "c3d1e1dd-8fb4-4b8f-b7ea-7fa96038d39d", + "c3ew11cd-8fb4-4b8f-b7ea-7fa96038d39d", + ] + ) + assert len(res) == 0 diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/test_vector_stores_mariadb.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/test_vector_stores_mariadb.py new file mode 100644 index 0000000000000..0e690fe1ed71d --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-mariadb/tests/test_vector_stores_mariadb.py @@ -0,0 +1,7 @@ +from llama_index.core.vector_stores.types import BasePydanticVectorStore +from llama_index.vector_stores.mariadb import MariaDBVectorStore + + +def test_class(): + names_of_base_classes = [b.__name__ for b in MariaDBVectorStore.__mro__] + assert BasePydanticVectorStore.__name__ in names_of_base_classes diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/llama_index/vector_stores/milvus/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/llama_index/vector_stores/milvus/base.py index 93594941d450e..a8cba8db5706a 100644 --- 
a/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/llama_index/vector_stores/milvus/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/llama_index/vector_stores/milvus/base.py @@ -6,10 +6,12 @@ import logging from typing import Any, Dict, List, Optional, Union +from copy import deepcopy from enum import Enum from llama_index.core.bridge.pydantic import Field, PrivateAttr +from llama_index.core.indices.query.embedding_utils import get_top_k_mmr_embeddings from llama_index.core.schema import BaseNode, TextNode from llama_index.core.utils import iter_batch from llama_index.vector_stores.milvus.utils import ( @@ -41,6 +43,7 @@ DEFAULT_BATCH_SIZE = 100 MILVUS_ID_FIELD = "id" +DEFAULT_MMR_PREFETCH_FACTOR = 4.0 try: from pymilvus import WeightedRanker, RRFRanker @@ -411,8 +414,6 @@ def delete_nodes( node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None. filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None. """ - from copy import deepcopy - filters_cpy = deepcopy(filters) or MetadataFilters(filters=[]) if node_ids: @@ -436,6 +437,58 @@ def clear(self) -> None: """Clears db.""" self._milvusclient.drop_collection(self.collection_name) + def get_nodes( + self, + node_ids: Optional[List[str]] = None, + filters: Optional[MetadataFilters] = None, + ) -> List[BaseNode]: + """Get nodes by node ids or metadata filters. + + Args: + node_ids (Optional[List[str]], optional): IDs of nodes to retrieve. Defaults to None. + filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None. + + Raises: + ValueError: Neither or both of node_ids and filters are provided. + + Returns: + List[BaseNode]: + """ + if node_ids is None and filters is None: + raise ValueError("Either node_ids or filters must be provided.") + + filters_cpy = deepcopy(filters) or MetadataFilters(filters=[]) + milvus_filter = _to_milvus_filter(filters_cpy) + + if node_ids is not None and milvus_filter: + raise ValueError("Only one of node_ids or filters can be provided.") + + res = self.client.query( + ids=node_ids, collection_name=self.collection_name, filter=milvus_filter + ) + + nodes = [] + for item in res: + if not self.text_key: + node = metadata_dict_to_node(item) + node.embedding = item.get(self.embedding_field, None) + else: + try: + text = item.pop(self.text_key) + except Exception: + raise ValueError( + "The passed in text_key value does not exist " + "in the retrieved entity." + ) from None + embedding = item.pop(self.embedding_field, None) + node = TextNode( + text=text, + embedding=embedding, + metadata=item, + ) + nodes.append(node) + return nodes + def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult: """Query index for top k most similar nodes. 
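For orientation, here is a rough usage sketch of the `get_nodes` method added above, together with the MMR query mode introduced in the next hunks. This is a sketch only: the URI, collection name, dimension, node IDs, filter values, and embeddings are illustrative placeholders, not values taken from this diff.

```python
# Hypothetical usage of MilvusVectorStore.get_nodes() and MMR querying.
from llama_index.core.vector_stores.types import (
    MetadataFilter,
    MetadataFilters,
    VectorStoreQuery,
    VectorStoreQueryMode,
)
from llama_index.vector_stores.milvus import MilvusVectorStore

store = MilvusVectorStore(
    uri="http://localhost:19530",  # assumed local Milvus instance
    collection_name="demo",
    dim=3,
)

# Fetch nodes either by ID or by metadata filters -- passing both (or neither)
# raises a ValueError in the new get_nodes() implementation.
by_id = store.get_nodes(node_ids=["node-0", "node-1"])
by_filter = store.get_nodes(
    filters=MetadataFilters(filters=[MetadataFilter(key="theme", value="Mafia")])
)

# MMR search: prefetch a larger candidate pool, then re-rank for diversity.
# mmr_prefetch_k and mmr_prefetch_factor are mutually exclusive kwargs.
result = store.query(
    VectorStoreQuery(
        query_embedding=[1.0, 0.0, 0.0],
        similarity_top_k=2,
        mode=VectorStoreQueryMode.MMR,
    ),
    mmr_prefetch_k=10,
    mmr_threshold=0.5,
)
```

When neither prefetch kwarg is given, the implementation below prefetches `similarity_top_k * DEFAULT_MMR_PREFETCH_FACTOR` (4.0) candidates before re-ranking.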
@@ -452,6 +505,8 @@ def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResul elif query.mode == VectorStoreQueryMode.HYBRID: if self.enable_sparse is False: raise ValueError(f"QueryMode is HYBRID, but enable_sparse is False.") + elif query.mode == VectorStoreQueryMode.MMR: + pass else: raise ValueError(f"Milvus does not support {query.mode} yet.") @@ -547,6 +602,88 @@ def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResul similarities.append(hit["distance"]) ids.append(hit["id"]) + elif query.mode == VectorStoreQueryMode.MMR: + # Perform MMR search + mmr_threshold = kwargs.get("mmr_threshold", None) + + if ( + kwargs.get("mmr_prefetch_factor") is not None + and kwargs.get("mmr_prefetch_k") is not None + ): + raise ValueError( + "'mmr_prefetch_factor' and 'mmr_prefetch_k' " + "cannot coexist in a call to query()" + ) + else: + if kwargs.get("mmr_prefetch_k") is not None: + prefetch_k0 = int(kwargs["mmr_prefetch_k"]) + else: + prefetch_k0 = int( + query.similarity_top_k + * kwargs.get("mmr_prefetch_factor", DEFAULT_MMR_PREFETCH_FACTOR) + ) + + res = self._milvusclient.search( + collection_name=self.collection_name, + data=[query.query_embedding], + filter=string_expr, + limit=prefetch_k0, + output_fields=output_fields, + search_params=self.search_config, + anns_field=self.embedding_field, + ) + + nodes = res[0] + node_embeddings = [] + node_ids = [] + for node in nodes: + node_embeddings.append(node["entity"]["embedding"]) + node_ids.append(node["id"]) + + mmr_similarities, mmr_ids = get_top_k_mmr_embeddings( + query_embedding=query.query_embedding, + embeddings=node_embeddings, + similarity_top_k=query.similarity_top_k, + embedding_ids=node_ids, + mmr_threshold=mmr_threshold, + ) + + node_dict = dict(list(zip(node_ids, nodes))) + selected_nodes = [node_dict[id] for id in mmr_ids if id in node_dict] + + nodes = [] + # Parse the results + for hit in selected_nodes: + if not self.text_key: + node = metadata_dict_to_node( + { + "_node_content": hit["entity"].get("_node_content", None), + "_node_type": hit["entity"].get("_node_type", None), + } + ) + else: + try: + text = hit["entity"].get(self.text_key) + except Exception: + raise ValueError( + "The passed in text_key value does not exist " + "in the retrieved entity." 
+ ) + + metadata = { + key: hit["entity"].get(key) for key in self.output_fields + } + node = TextNode(text=text, metadata=metadata) + + nodes.append(node) + + similarities = mmr_similarities # Passing the MMR similarities instead of the original similarities + ids = mmr_ids + + logger.debug( + f"Successfully performed MMR on embeddings in collection: {self.collection_name}" + ) + else: # Perform hybrid search sparse_emb = self.sparse_embedding_function.encode_queries( diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/pyproject.toml index 8a3c5159b4a76..f5b2d12769908 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-milvus/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-milvus" readme = "README.md" -version = "0.2.1" +version = "0.2.3" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/llama_index/vector_stores/neo4jvector/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/llama_index/vector_stores/neo4jvector/base.py index 75d10891a5d20..226877eff0b46 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/llama_index/vector_stores/neo4jvector/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/llama_index/vector_stores/neo4jvector/base.py @@ -18,7 +18,6 @@ metadata_dict_to_node, node_to_metadata_dict, ) -from neo4j.exceptions import CypherSyntaxError _logger = logging.getLogger(__name__) @@ -436,26 +435,39 @@ def create_new_keyword_index(self, text_node_properties: List[str] = []) -> None self.database_query(fts_index_query) def database_query( - self, query: str, params: Optional[dict] = None - ) -> List[Dict[str, Any]]: - """ - This method sends a Cypher query to the connected Neo4j database - and returns the results as a list of dictionaries. - - Args: - query (str): The Cypher query to execute. - params (dict, optional): Dictionary of query parameters. Defaults to {}. - - Returns: - List[Dict[str, Any]]: List of dictionaries containing the query results. 
- """ + self, + query: str, + params: Optional[Dict[str, Any]] = None, + ) -> Any: params = params or {} - with self._driver.session(database=self._database) as session: - try: - data = session.run(query, params) - return [r.data() for r in data] - except CypherSyntaxError as e: - raise ValueError(f"Cypher Statement is not valid\n{e}") + try: + data, _, _ = self._driver.execute_query( + query, database=self._database, parameters_=params + ) + return [r.data() for r in data] + except neo4j.exceptions.Neo4jError as e: + if not ( + ( + ( # isCallInTransactionError + e.code == "Neo.DatabaseError.Statement.ExecutionFailed" + or e.code + == "Neo.DatabaseError.Transaction.TransactionStartFailed" + ) + and "in an implicit transaction" in e.message + ) + or ( # isPeriodicCommitError + e.code == "Neo.ClientError.Statement.SemanticError" + and ( + "in an open transaction is not possible" in e.message + or "tried to execute in an explicit transaction" in e.message + ) + ) + ): + raise + # Fallback to allow implicit transactions + with self._driver.session() as session: + data = session.run(neo4j.Query(text=query), params) + return [r.data() for r in data] def add(self, nodes: List[BaseNode], **add_kwargs: Any) -> List[str]: ids = [r.node_id for r in nodes] diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/pyproject.toml index 24b2da7a8f58f..190aa492125ab 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-neo4jvector/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-neo4jvector" readme = "README.md" -version = "0.2.1" +version = "0.2.2" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/CHANGELOG.md b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/CHANGELOG.md index bcbd5d3ab7ff8..8061e7bad3b9b 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/CHANGELOG.md +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/CHANGELOG.md @@ -1,5 +1,13 @@ # CHANGELOG — llama-index-vector-stores-opensearch +## [0.2.2] + +- Fixed issue where Opensearch Serverless does not support painless scripting so handling the case where is_aoss is set and using knn_score script instead. 
+ +## [0.2.1] + +- Refresh the Opensearch index after delete operations so that future searches reflect the change + ## [0.1.14] - Adds support for full MetadataFilters (all operators and nested filters) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/llama_index/vector_stores/opensearch/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/llama_index/vector_stores/opensearch/base.py index 662cc15f5eda6..6f6460b1e604b 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/llama_index/vector_stores/opensearch/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/llama_index/vector_stores/opensearch/base.py @@ -1,6 +1,5 @@ """Elasticsearch/Opensearch vector store.""" -import asyncio import uuid from datetime import datetime from typing import Any, Dict, Iterable, List, Optional, Union, cast @@ -22,14 +21,12 @@ metadata_dict_to_node, node_to_metadata_dict, ) -from opensearchpy import AsyncOpenSearch from opensearchpy.client import Client as OSClient -from opensearchpy.exceptions import NotFoundError -from opensearchpy.helpers import async_bulk IMPORT_OPENSEARCH_PY_ERROR = ( "Could not import OpenSearch. Please install it with `pip install opensearch-py`." ) +IMPORT_ASYNC_OPENSEARCH_PY_ERROR = "Could not import AsyncOpenSearch. Please install it with `pip install opensearch-py`." INVALID_HYBRID_QUERY_ERROR = ( "Please specify the lexical_query and search_pipeline for hybrid search." ) @@ -54,8 +51,11 @@ class OpensearchVectorClient: method (Optional[dict]): Opensearch "method" JSON obj for configuring the KNN index. This includes engine, metric, and other config params. Defaults to: - {"name": "hnsw", "space_type": "l2", "engine": "faiss", + {"name": "hnsw", "space_type": "l2", "engine": "nmslib", "parameters": {"ef_construction": 256, "m": 48}} + settings (Optional[dict]): Settings for the Opensearch index creation. Defaults to: + {"index": {"knn": True, "knn.algo_param.ef_search": 100}} + space_type (Optional[str]): Space type for distance metric calculation. Defaults to: l2 **kwargs: Optional arguments passed to the OpenSearch client from opensearch-py.
""" @@ -68,7 +68,9 @@ def __init__( embedding_field: str = "embedding", text_field: str = "content", method: Optional[dict] = None, + settings: Optional[dict] = None, engine: Optional[str] = "nmslib", + space_type: Optional[str] = "l2", max_chunk_bytes: int = 1 * 1024 * 1024, search_pipeline: Optional[str] = None, os_client: Optional[OSClient] = None, @@ -82,6 +84,8 @@ def __init__( "engine": engine, "parameters": {"ef_construction": 256, "m": 48}, } + if settings is None: + settings = {"index": {"knn": True, "knn.algo_param.ef_search": 100}} if embedding_field is None: embedding_field = "embedding" self._embedding_field = embedding_field @@ -94,10 +98,11 @@ def __init__( self._search_pipeline = search_pipeline http_auth = kwargs.get("http_auth") + self.space_type = space_type self.is_aoss = self._is_aoss_enabled(http_auth=http_auth) # initialize mapping idx_conf = { - "settings": {"index": {"knn": True, "knn.algo_param.ef_search": 100}}, + "settings": settings, "mappings": { "properties": { embedding_field: { @@ -108,36 +113,72 @@ def __init__( } }, } - self._os_client = os_client or self._get_async_opensearch_client( + self._os_client = os_client or self._get_opensearch_client( + self._endpoint, **kwargs + ) + self._os_async_client = self._get_async_opensearch_client( self._endpoint, **kwargs ) not_found_error = self._import_not_found_error() - event_loop = asyncio.get_event_loop() try: - event_loop.run_until_complete( - self._os_client.indices.get(index=self._index) - ) + self._os_client.indices.get(index=self._index) except not_found_error: - event_loop.run_until_complete( - self._os_client.indices.create(index=self._index, body=idx_conf) - ) - event_loop.run_until_complete( - self._os_client.indices.refresh(index=self._index) - ) + self._os_client.indices.create(index=self._index, body=idx_conf) + self._os_client.indices.refresh(index=self._index) - def _import_async_opensearch(self) -> Any: + def _import_opensearch(self) -> Any: """Import OpenSearch if available, otherwise raise error.""" + try: + from opensearchpy import OpenSearch + except ImportError: + raise ImportError(IMPORT_OPENSEARCH_PY_ERROR) + return OpenSearch + + def _import_async_opensearch(self) -> Any: + """Import AsyncOpenSearch if available, otherwise raise error.""" + try: + from opensearchpy import AsyncOpenSearch + except ImportError: + raise ImportError(IMPORT_ASYNC_OPENSEARCH_PY_ERROR) return AsyncOpenSearch - def _import_async_bulk(self) -> Any: + def _import_bulk(self) -> Any: """Import bulk if available, otherwise raise error.""" + try: + from opensearchpy.helpers import bulk + except ImportError: + raise ImportError(IMPORT_OPENSEARCH_PY_ERROR) + return bulk + + def _import_async_bulk(self) -> Any: + """Import async_bulk if available, otherwise raise error.""" + try: + from opensearchpy.helpers import async_bulk + except ImportError: + raise ImportError(IMPORT_ASYNC_OPENSEARCH_PY_ERROR) return async_bulk def _import_not_found_error(self) -> Any: """Import not found error if available, otherwise raise error.""" + try: + from opensearchpy.exceptions import NotFoundError + except ImportError: + raise ImportError(IMPORT_OPENSEARCH_PY_ERROR) return NotFoundError + def _get_opensearch_client(self, opensearch_url: str, **kwargs: Any) -> Any: + """Get OpenSearch client from the opensearch_url, otherwise raise error.""" + try: + opensearch = self._import_opensearch() + client = opensearch(opensearch_url, **kwargs) + except ValueError as e: + raise ImportError( + f"OpenSearch client string provided is not in proper 
format. " + f"Got error: {e} " + ) + return client + def _get_async_opensearch_client(self, opensearch_url: str, **kwargs: Any) -> Any: """Get AsyncOpenSearch client from the opensearch_url, otherwise raise error.""" try: @@ -151,7 +192,58 @@ def _get_async_opensearch_client(self, opensearch_url: str, **kwargs: Any) -> An ) return client - async def _bulk_ingest_embeddings( + def _bulk_ingest_embeddings( + self, + client: Any, + index_name: str, + embeddings: List[List[float]], + texts: Iterable[str], + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + vector_field: str = "embedding", + text_field: str = "content", + mapping: Optional[Dict] = None, + max_chunk_bytes: Optional[int] = 1 * 1024 * 1024, + is_aoss: bool = False, + ) -> List[str]: + """Bulk Ingest Embeddings into given index.""" + if not mapping: + mapping = {} + + bulk = self._import_bulk() + not_found_error = self._import_not_found_error() + requests = [] + return_ids = [] + + try: + client.indices.get(index=index_name) + except not_found_error: + client.indices.create(index=index_name, body=mapping) + + for i, text in enumerate(texts): + metadata = metadatas[i] if metadatas else {} + _id = ids[i] if ids else str(uuid.uuid4()) + request = { + "_op_type": "index", + "_index": index_name, + vector_field: embeddings[i], + text_field: text, + "metadata": metadata, + } + if is_aoss: + request["id"] = _id + else: + request["_id"] = _id + requests.append(request) + return_ids.append(_id) + + bulk(client, requests, max_chunk_bytes=max_chunk_bytes) + if not is_aoss: + client.indices.refresh(index=index_name) + + return return_ids + + async def _abulk_ingest_embeddings( self, client: Any, index_name: str, @@ -173,7 +265,6 @@ async def _bulk_ingest_embeddings( not_found_error = self._import_not_found_error() requests = [] return_ids = [] - mapping = mapping try: await client.indices.get(index=index_name) @@ -196,9 +287,11 @@ async def _bulk_ingest_embeddings( request["_id"] = _id requests.append(request) return_ids.append(_id) + await async_bulk(client, requests, max_chunk_bytes=max_chunk_bytes) if not is_aoss: await client.indices.refresh(index=index_name) + return return_ids def _default_approximate_search_query( @@ -309,9 +402,11 @@ def _knn_search_query( If there are no filters do approx-knn search. If there are (pre)-filters, do an exhaustive exact knn search using 'painless - scripting'. + scripting' if the version of Opensearch supports it, otherwise uses knn_score scripting score. - Note that approximate knn search does not support pre-filtering. + Note: + -AWS Opensearch Serverless does not support the painless scripting functionality at this time according to AWS. + -Also note that approximate knn search does not support pre-filtering. Args: query_embedding: Vector embedding to query. 
@@ -328,16 +423,25 @@ def _knn_search_query( search_query = self._default_approximate_search_query( query_embedding, k, vector_field=embedding_field ) + elif self.is_aoss: + # If is_aoss is set, we are on the AWS Opensearch Serverless offering, which cannot + # use painless scripting, so the scoring script returned is the plain knn_score script + search_query = self._default_scoring_script_query( + query_embedding, + k, + space_type=self.space_type, + pre_filter={"bool": {"filter": pre_filter}}, + vector_field=embedding_field, + ) else: # https://opensearch.org/docs/latest/search-plugins/knn/painless-functions/ - search_query = self._default_painless_scripting_query( + search_query = self._default_scoring_script_query( query_embedding, k, space_type="l2Squared", pre_filter={"bool": {"filter": pre_filter}}, vector_field=embedding_field, ) - return search_query def _hybrid_search_query( @@ -382,7 +486,9 @@ def _lexical_search_query( def __get_painless_scripting_source( self, space_type: str, vector_field: str = "embedding" ) -> str: - """For Painless Scripting, it returns the script source based on space type.""" + """For Painless Scripting, it returns the script source based on space type. + This does not work with Opensearch Serverless currently. + """ source_value = ( f"(1.0 + {space_type}(params.query_value, doc['{vector_field}']))" ) @@ -391,7 +497,29 @@ def __get_painless_scripting_source( else: return f"1/{source_value}" - def _default_painless_scripting_query( + def _get_knn_scoring_script(self, space_type, vector_field, query_vector): + """Default scoring script that will work with AWS Opensearch Serverless.""" + return { + "source": "knn_score", + "lang": "knn", + "params": { + "field": vector_field, + "query_value": query_vector, + "space_type": space_type, + }, + } + + def _get_painless_scoring_script(self, space_type, vector_field, query_vector): + source = self.__get_painless_scripting_source(space_type, vector_field) + return { + "source": source, + "params": { + "field": vector_field, + "query_value": query_vector, + }, + } + + def _default_scoring_script_query( + self, query_vector: List[float], k: int = 4, @@ -399,23 +527,31 @@ def _default_painless_scripting_query( pre_filter: Optional[Union[Dict, List]] = None, vector_field: str = "embedding", ) -> Dict: - """For Painless Scripting Search, this is the default query.""" + """For Scoring Script Search, this is the default query. It has to account for Opensearch + Serverless, which does not support painless scripting functions, so it defaults to knn_score there. + """ if not pre_filter: pre_filter = MATCH_ALL_QUERY - source = self.__get_painless_scripting_source(space_type, vector_field) + # check if we can use painless scripting or have to use the default knn_score script + if self.is_aoss: + if space_type == "l2Squared": + raise ValueError( + "Unsupported space type for aoss. Can only use l1, l2, cosinesimil."
+ ) + script = self._get_knn_scoring_script( + space_type, vector_field, query_vector + ) + else: + script = self._get_painless_scoring_script( + space_type, vector_field, query_vector + ) return { "size": k, "query": { "script_score": { "query": pre_filter, - "script": { - "source": source, - "params": { - "field": vector_field, - "query_value": query_vector, - }, - }, + "script": script, } }, } @@ -430,7 +566,7 @@ def _is_aoss_enabled(self, http_auth: Any) -> bool: return True return False - async def index_results(self, nodes: List[BaseNode], **kwargs: Any) -> List[str]: + def index_results(self, nodes: List[BaseNode], **kwargs: Any) -> List[str]: """Store results in the index.""" embeddings: List[List[float]] = [] texts: List[str] = [] @@ -442,7 +578,7 @@ async def index_results(self, nodes: List[BaseNode], **kwargs: Any) -> List[str] texts.append(node.get_content(metadata_mode=MetadataMode.NONE)) metadatas.append(node_to_metadata_dict(node, remove_text=True)) - return await self._bulk_ingest_embeddings( + return self._bulk_ingest_embeddings( self._os_client, self._index, embeddings, @@ -456,7 +592,33 @@ async def index_results(self, nodes: List[BaseNode], **kwargs: Any) -> List[str] is_aoss=self.is_aoss, ) - async def delete_by_doc_id(self, doc_id: str) -> None: + async def aindex_results(self, nodes: List[BaseNode], **kwargs: Any) -> List[str]: + """Store results in the index.""" + embeddings: List[List[float]] = [] + texts: List[str] = [] + metadatas: List[dict] = [] + ids: List[str] = [] + for node in nodes: + ids.append(node.node_id) + embeddings.append(node.get_embedding()) + texts.append(node.get_content(metadata_mode=MetadataMode.NONE)) + metadatas.append(node_to_metadata_dict(node, remove_text=True)) + + return await self._abulk_ingest_embeddings( + self._os_async_client, + self._index, + embeddings, + texts, + metadatas=metadatas, + ids=ids, + vector_field=self._embedding_field, + text_field=self._text_field, + mapping=None, + max_chunk_bytes=self._max_chunk_bytes, + is_aoss=self.is_aoss, + ) + + def delete_by_doc_id(self, doc_id: str) -> None: """ Deletes all OpenSearch documents corresponding to the given LlamaIndex `Document` ID. @@ -466,9 +628,49 @@ async def delete_by_doc_id(self, doc_id: str) -> None: search_query = { "query": {"term": {"metadata.doc_id.keyword": {"value": doc_id}}} } - await self._os_client.delete_by_query(index=self._index, body=search_query) + self._os_client.delete_by_query( + index=self._index, body=search_query, refresh=True + ) + + async def adelete_by_doc_id(self, doc_id: str) -> None: + """ + Deletes all OpenSearch documents corresponding to the given LlamaIndex `Document` ID. + + Args: + doc_id (str): a LlamaIndex `Document` id + """ + search_query = { + "query": {"term": {"metadata.doc_id.keyword": {"value": doc_id}}} + } + await self._os_async_client.delete_by_query( + index=self._index, body=search_query, refresh=True + ) + + def delete_nodes( + self, + node_ids: Optional[List[str]] = None, + filters: Optional[MetadataFilters] = None, + **delete_kwargs: Any, + ) -> None: + """Deletes nodes. + + Args: + node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None. + filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None. 
+ """ + if not node_ids and not filters: + return + + query = {"query": {"bool": {"filter": []}}} + if node_ids: + query["query"]["bool"]["filter"].append({"terms": {"_id": node_ids or []}}) + + if filters: + query["query"]["bool"]["filter"].extend(self._parse_filters(filters)) + + self._os_client.delete_by_query(index=self._index, body=query, refresh=True) - async def delete_nodes( + async def adelete_nodes( self, node_ids: Optional[List[str]] = None, filters: Optional[MetadataFilters] = None, @@ -490,12 +692,60 @@ async def delete_nodes( if filters: query["query"]["bool"]["filter"].extend(self._parse_filters(filters)) - await self._os_client.delete_by_query(index=self._index, body=query) + await self._os_async_client.delete_by_query( + index=self._index, body=query, refresh=True + ) - async def clear(self) -> None: + def clear(self) -> None: + """Clears index.""" + query = {"query": {"bool": {"filter": []}}} + self._os_client.delete_by_query(index=self._index, body=query, refresh=True) + + async def aclear(self) -> None: """Clears index.""" query = {"query": {"bool": {"filter": []}}} - await self._os_client.delete_by_query(index=self._index, body=query) + await self._os_async_client.delete_by_query( + index=self._index, body=query, refresh=True + ) + + def query( + self, + query_mode: VectorStoreQueryMode, + query_str: Optional[str], + query_embedding: List[float], + k: int, + filters: Optional[MetadataFilters] = None, + ) -> VectorStoreQueryResult: + if query_mode == VectorStoreQueryMode.HYBRID: + if query_str is None or self._search_pipeline is None: + raise ValueError(INVALID_HYBRID_QUERY_ERROR) + search_query = self._hybrid_search_query( + self._text_field, + query_str, + self._embedding_field, + query_embedding, + k, + filters=filters, + ) + params = { + "search_pipeline": self._search_pipeline, + } + elif query_mode == VectorStoreQueryMode.TEXT_SEARCH: + search_query = self._lexical_search_query( + self._text_field, query_str, k, filters=filters + ) + params = None + else: + search_query = self._knn_search_query( + self._embedding_field, query_embedding, k, filters=filters + ) + params = None + + res = self._os_client.search( + index=self._index, body=search_query, params=params + ) + + return self._to_query_result(res) async def aquery( self, @@ -530,7 +780,7 @@ async def aquery( ) params = None - res = await self._os_client.search( + res = await self._os_async_client.search( index=self._index, body=search_query, params=params ) @@ -641,9 +891,8 @@ def add( nodes: List[BaseNode]: list of nodes with embeddings. """ - return asyncio.get_event_loop().run_until_complete( - self.async_add(nodes, **add_kwargs) - ) + self._client.index_results(nodes) + return [result.node_id for result in nodes] async def async_add( self, @@ -657,32 +906,30 @@ async def async_add( nodes: List[BaseNode]: list of nodes with embeddings. """ - await self._client.index_results(nodes) + await self._client.aindex_results(nodes) return [result.node_id for result in nodes] def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None: """ - Delete nodes using a ref_doc_id. + Delete nodes using with ref_doc_id. Args: - ref_doc_id (str): The doc_id of the document whose nodes should be deleted. + ref_doc_id (str): The doc_id of the document to delete. """ - asyncio.get_event_loop().run_until_complete( - self.adelete(ref_doc_id, **delete_kwargs) - ) + self._client.delete_by_doc_id(ref_doc_id) async def adelete(self, ref_doc_id: str, **delete_kwargs: Any) -> None: """ - Async delete nodes using a ref_doc_id. 
+ Async delete nodes using with ref_doc_id. Args: - ref_doc_id (str): The doc_id of the document whose nodes should be deleted. + ref_doc_id (str): The doc_id of the document to delete. """ - await self._client.delete_by_doc_id(ref_doc_id) + await self._client.adelete_by_doc_id(ref_doc_id) - async def adelete_nodes( + def delete_nodes( self, node_ids: Optional[List[str]] = None, filters: Optional[MetadataFilters] = None, @@ -694,31 +941,29 @@ async def adelete_nodes( node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None. filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None. """ - await self._client.delete_nodes(node_ids, filters, **delete_kwargs) + self._client.delete_nodes(node_ids, filters, **delete_kwargs) - def delete_nodes( + async def adelete_nodes( self, node_ids: Optional[List[str]] = None, filters: Optional[MetadataFilters] = None, **delete_kwargs: Any, ) -> None: - """Deletes nodes. + """Async deletes nodes async. Args: node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None. filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None. """ - asyncio.get_event_loop().run_until_complete( - self.adelete_nodes(node_ids, filters, **delete_kwargs) - ) - - async def aclear(self) -> None: - """Clears index.""" - await self._client.clear() + await self._client.adelete_nodes(node_ids, filters, **delete_kwargs) def clear(self) -> None: """Clears index.""" - asyncio.get_event_loop().run_until_complete(self.aclear()) + self._client.clear() + + async def aclear(self) -> None: + """Async clears index.""" + await self._client.aclear() def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult: """ @@ -728,7 +973,15 @@ def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResul query (VectorStoreQuery): Store query object. 
""" - return asyncio.get_event_loop().run_until_complete(self.aquery(query, **kwargs)) + query_embedding = cast(List[float], query.query_embedding) + + return self._client.query( + query.mode, + query.query_str, + query_embedding, + query.similarity_top_k, + filters=query.filters, + ) async def aquery( self, query: VectorStoreQuery, **kwargs: Any diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/pyproject.toml index dc78447bcd99d..755987bebe699 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-opensearch" readme = "README.md" -version = "0.2.0" +version = "0.3.0" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/tests/test_opensearch_client.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/tests/test_opensearch_client.py index 08eec1bf76743..6c73295ba405e 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/tests/test_opensearch_client.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-opensearch/tests/test_opensearch_client.py @@ -31,15 +31,15 @@ evt_loop = asyncio.get_event_loop() try: - from opensearchpy import AsyncOpenSearch + from opensearchpy import OpenSearch - os_client = AsyncOpenSearch("localhost:9200") - evt_loop.run_until_complete(os_client.info()) + sync_os_client = OpenSearch("localhost:9200") + sync_os_client.info() opensearch_not_available = False except (ImportError, Exception): opensearch_not_available = True finally: - evt_loop.run_until_complete(os_client.close()) + sync_os_client.close() TEST_EMBED_DIM = 3 @@ -81,9 +81,10 @@ def os_store(index_name: str) -> Generator[OpensearchVectorStore, None, None]: # teardown step # delete index - evt_loop.run_until_complete(client._os_client.indices.delete(index=index_name)) - # close client aiohttp session - evt_loop.run_until_complete(client._os_client.close()) + client._os_client.indices.delete(index=index_name) + # close client + client._os_client.close() + client._os_async_client.close() @pytest.fixture(scope="session") @@ -190,12 +191,8 @@ def node_embeddings_2() -> List[TextNode]: def count_docs_in_index(os_store: OpensearchVectorStore) -> int: """Refresh indices and return the count of documents in the index.""" - evt_loop.run_until_complete( - os_store.client._os_client.indices.refresh(index=os_store.client._index) - ) - count = evt_loop.run_until_complete( - os_store.client._os_client.count(index=os_store.client._index) - ) + os_store.client._os_client.indices.refresh(index=os_store.client._index) + count = os_store.client._os_client.count(index=os_store.client._index) return count["count"] @@ -216,6 +213,24 @@ def test_functionality( assert count_docs_in_index(os_store) == len(node_embeddings) - 1 +@pytest.mark.asyncio() +@pytest.mark.skipif(opensearch_not_available, reason="opensearch is not available") +async def test_async_functionality( + os_store: OpensearchVectorStore, node_embeddings: List[TextNode] +) -> None: + # add + assert len(await os_store.async_add(node_embeddings)) == len(node_embeddings) + # query + exp_node = node_embeddings[3] + query = 
VectorStoreQuery(query_embedding=exp_node.embedding, similarity_top_k=1) + query_result = await os_store.aquery(query) + assert query_result.nodes + assert query_result.nodes[0].get_content() == exp_node.text + # delete one node using its associated doc_id + await os_store.adelete("test-1") + assert count_docs_in_index(os_store) == len(node_embeddings) - 1 + + @pytest.mark.skipif(opensearch_not_available, reason="opensearch is not available") def test_delete_nodes( os_store: OpensearchVectorStore, node_embeddings_2: List[TextNode] @@ -244,6 +259,35 @@ def test_delete_nodes( assert "ccc" in res.ids +@pytest.mark.asyncio() +@pytest.mark.skipif(opensearch_not_available, reason="opensearch is not available") +async def test_adelete_nodes( + os_store: OpensearchVectorStore, node_embeddings_2: List[TextNode] +): + await os_store.async_add(node_embeddings_2) + + q = VectorStoreQuery(query_embedding=_get_sample_vector(0.5), similarity_top_k=10) + + # test deleting nothing + await os_store.adelete_nodes() + time.sleep(1) + res = await os_store.aquery(q) + assert all(i in res.ids for i in ["aaa", "bbb", "ccc"]) + + # test deleting element that doesn't exist + await os_store.adelete_nodes(["asdf"]) + time.sleep(1) + res = await os_store.aquery(q) + assert all(i in res.ids for i in ["aaa", "bbb", "ccc"]) + + # test deleting list + await os_store.adelete_nodes(["aaa", "bbb"]) + time.sleep(1) + res = await os_store.aquery(q) + assert all(i not in res.ids for i in ["aaa", "bbb"]) + assert "ccc" in res.ids + + @pytest.mark.skipif(opensearch_not_available, reason="opensearch is not available") def test_delete_nodes_metadata( os_store: OpensearchVectorStore, node_embeddings_2: List[TextNode] @@ -336,6 +380,26 @@ def test_clear( assert len(res.ids) == 0 +@pytest.mark.asyncio() +@pytest.mark.skipif(opensearch_not_available, reason="opensearch is not available") +async def test_aclear( + os_store: OpensearchVectorStore, node_embeddings_2: List[TextNode] +) -> None: + await os_store.async_add(node_embeddings_2) + + q = VectorStoreQuery(query_embedding=_get_sample_vector(0.5), similarity_top_k=10) + res = await os_store.aquery(q) + assert all(i in res.ids for i in ["bbb", "aaa", "ddd", "ccc"]) + + await os_store.aclear() + + time.sleep(1) + + res = await os_store.aquery(q) + assert all(i not in res.ids for i in ["bbb", "aaa", "ddd", "ccc"]) + assert len(res.ids) == 0 + + @pytest.fixture() def insert_document(os_store: OpensearchVectorStore): """Factory to insert a document with custom metadata into the OpensearchVectorStore.""" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/llama_index/vector_stores/postgres/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/llama_index/vector_stores/postgres/base.py index 21749780d0574..5a8087ae568cd 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/llama_index/vector_stores/postgres/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/llama_index/vector_stores/postgres/base.py @@ -138,7 +138,7 @@ class PGVectorStore(BasePydanticVectorStore): flat_metadata: bool = False connection_string: str - async_connection_string: Union[str, sqlalchemy.engine.URL] + async_connection_string: str table_name: str schema_name: str embed_dim: int @@ -148,6 +148,7 @@ class PGVectorStore(BasePydanticVectorStore): perform_setup: bool debug: bool use_jsonb: bool + create_engine_kwargs: Dict hnsw_kwargs: Optional[Dict[str, Any]] @@ -173,6 +174,7 @@ def 
__init__( debug: bool = False, use_jsonb: bool = False, hnsw_kwargs: Optional[Dict[str, Any]] = None, + create_engine_kwargs: Optional[Dict[str, Any]] = None, ) -> None: """Constructor. @@ -191,6 +193,7 @@ def __init__( hnsw_kwargs (Optional[Dict[str, Any]], optional): HNSW kwargs, a dict that contains "hnsw_ef_construction", "hnsw_ef_search", "hnsw_m", and optionally "hnsw_dist_method". Defaults to None, which turns off HNSW search. + create_engine_kwargs (Optional[Dict[str, Any]], optional): Engine parameters to pass to create_engine. Defaults to None. """ table_name = table_name.lower() schema_name = schema_name.lower() @@ -204,8 +207,8 @@ def __init__( from sqlalchemy.orm import declarative_base super().__init__( - connection_string=connection_string, - async_connection_string=async_connection_string, + connection_string=str(connection_string), + async_connection_string=str(async_connection_string), table_name=table_name, schema_name=schema_name, hybrid_search=hybrid_search, @@ -216,6 +219,7 @@ def __init__( debug=debug, use_jsonb=use_jsonb, hnsw_kwargs=hnsw_kwargs, + create_engine_kwargs=create_engine_kwargs or {}, ) # sqlalchemy model @@ -264,6 +268,7 @@ def from_params( debug: bool = False, use_jsonb: bool = False, hnsw_kwargs: Optional[Dict[str, Any]] = None, + create_engine_kwargs: Optional[Dict[str, Any]] = None, ) -> "PGVectorStore": """Construct from params. @@ -287,6 +292,7 @@ def from_params( hnsw_kwargs (Optional[Dict[str, Any]], optional): HNSW kwargs, a dict that contains "hnsw_ef_construction", "hnsw_ef_search", "hnsw_m", and optionally "hnsw_dist_method". Defaults to None, which turns off HNSW search. + create_engine_kwargs (Optional[Dict[str, Any]], optional): Engine parameters to pass to create_engine. Defaults to None. Returns: PGVectorStore: Instance of PGVectorStore constructed from params. 
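A minimal sketch of the new `create_engine_kwargs` pass-through, assuming a locally running Postgres with pgvector; the connection details are placeholders and the pool settings are just one example of what SQLAlchemy's `create_engine`/`create_async_engine` accept:

```python
# Hypothetical example: tuning the SQLAlchemy connection pool via the new
# create_engine_kwargs parameter (connection values are placeholders).
from llama_index.vector_stores.postgres import PGVectorStore

store = PGVectorStore.from_params(
    database="vector_db",
    host="localhost",
    password="password",
    port="5432",
    user="postgres",
    table_name="llama_index_vectors",
    embed_dim=1536,
    create_engine_kwargs={
        "pool_size": 10,       # forwarded to create_engine(...)
        "max_overflow": 20,    # ...and to create_async_engine(...)
        "pool_pre_ping": True,
    },
)
```

Per the `_connect` change below, the same kwargs are applied to both the sync and the async engine.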
@@ -311,6 +317,7 @@ def from_params( debug=debug, use_jsonb=use_jsonb, hnsw_kwargs=hnsw_kwargs, + create_engine_kwargs=create_engine_kwargs, ) @property @@ -324,10 +331,14 @@ def _connect(self) -> Any: from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from sqlalchemy.orm import sessionmaker - self._engine = create_engine(self.connection_string, echo=self.debug) + self._engine = create_engine( + self.connection_string, echo=self.debug, **self.create_engine_kwargs + ) self._session = sessionmaker(self._engine) - self._async_engine = create_async_engine(self.async_connection_string) + self._async_engine = create_async_engine( + self.async_connection_string, **self.create_engine_kwargs + ) self._async_session = sessionmaker(self._async_engine, class_=AsyncSession) # type: ignore def _create_schema_if_not_exists(self) -> None: diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/pyproject.toml index 74fe4bc1a2e6d..dd19c6a71bd8f 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-postgres/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-postgres" readme = "README.md" -version = "0.2.1" +version = "0.2.4" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/llama_index/vector_stores/redis/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/llama_index/vector_stores/redis/base.py index d37ecd8c19344..c53f963f63d86 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/llama_index/vector_stores/redis/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/llama_index/vector_stores/redis/base.py @@ -430,9 +430,7 @@ def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResul raise ValueError("Query embedding is required for querying.") redis_query = self._to_redis_query(query) - logger.info( - f"Querying index {self._index.name} with filters {redis_query.get_filter()}" - ) + logger.info(f"Querying index {self._index.name} with query {redis_query!s}") try: results = self._index.query(redis_query) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/pyproject.toml index e0e2e7a7fde80..f44975cd5474e 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/pyproject.toml @@ -18,7 +18,7 @@ RedisVectorStore = "redis" disallow_untyped_defs = true exclude = ["_static", "build", "examples", "notebooks", "venv"] ignore_missing_imports = true -python_version = "3.8" +python_version = "3.9" [tool.poetry] authors = ["Tyler Hutcherson "] @@ -27,14 +27,15 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-redis" readme = "README.md" -version = "0.3.0" +version = "0.3.2" [tool.poetry.dependencies] -python = ">=3.8.1,<4.0" -redisvl = "^0.1.3" +python = ">=3.9,<4.0" +redisvl = "^0.3.2" llama-index-core = "^0.11.0" [tool.poetry.group.dev.dependencies] +docker = "^7.1.0" ipython = "8.10.0" jupyter = "^1.0.0" mypy = "0.991" @@ -42,6 +43,7 @@ pre-commit = 
"3.2.0" pylint = "2.15.10" pytest = "7.2.1" pytest-mock = "3.11.1" +redis = "^5.0.8" ruff = "0.0.292" tree-sitter-languages = "^1.8.0" types-Deprecated = ">=0.1.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/BUILD index dabf212d7e716..45d59ac8248a2 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/BUILD +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/BUILD @@ -1 +1,5 @@ python_tests() + +python_test_utils( + name="test_utils", +) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/conftest.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/conftest.py new file mode 100644 index 0000000000000..59adf5f75fb1f --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/conftest.py @@ -0,0 +1,71 @@ +import pytest +from typing import List +from llama_index.core.schema import Document, TextNode +from llama_index.core.node_parser import SentenceSplitter +from redis import Redis +import docker + +docker_client = docker.from_env() +docker_client.ping() + +container = docker_client.containers.run( + "redis/redis-stack:latest", + detach=True, + name="redis", + ports={"6379/tcp": 6379, "8001/tcp": 8001}, +) + + +@pytest.fixture(scope="session", autouse=True) +def docker_setup(): + yield container + + container.stop() + container.remove() + + +@pytest.fixture() +def dummy_embedding() -> List: + return [0] * 1536 + + +@pytest.fixture() +def turtle_test() -> dict: + return { + "text": "something about turtles", + "metadata": {"animal": "turtle"}, + "question": "turtle stuff", + "doc_id": "1234", + } + + +@pytest.fixture() +def documents(turtle_test, dummy_embedding) -> List[Document]: + """List of documents represents data to be embedded in the datastore. + Minimum requirements for Documents in the /upsert endpoint's UpsertRequest. 
+ """ + return [ + Document( + text=turtle_test["text"], + metadata=turtle_test["metadata"], + doc_id=turtle_test["doc_id"], + embedding=dummy_embedding, + ), + Document( + text="something about whales", + metadata={"animal": "whale"}, + doc_id="5678", + embedding=dummy_embedding, + ), + ] + + +@pytest.fixture() +def test_nodes(documents) -> TextNode: + parser = SentenceSplitter() + return parser.get_nodes_from_documents(documents) + + +@pytest.fixture() +def redis_client() -> Redis: + return Redis.from_url("redis://localhost:6379/0") diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/test_vector_stores_redis.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/test_vector_stores_redis.py index a2fa952d77ca8..7c25402b7d867 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/test_vector_stores_redis.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-redis/tests/test_vector_stores_redis.py @@ -1,3 +1,5 @@ +from llama_index.core import MockEmbedding, StorageContext, VectorStoreIndex +from llama_index.core.llms import MockLLM from llama_index.core.vector_stores.types import BasePydanticVectorStore from llama_index.vector_stores.redis import RedisVectorStore @@ -5,3 +7,32 @@ def test_class(): names_of_base_classes = [b.__name__ for b in RedisVectorStore.__mro__] assert BasePydanticVectorStore.__name__ in names_of_base_classes + + +def test_default_usage(documents, turtle_test, redis_client): + vector_store = RedisVectorStore(redis_client=redis_client) + storage_context = StorageContext.from_defaults(vector_store=vector_store) + index = VectorStoreIndex.from_documents( + documents, + embed_model=MockEmbedding(embed_dim=1536), + storage_context=storage_context, + ) + + # create retrievers + query_engine = index.as_query_engine(llm=MockLLM(), similarity_top_k=1) + retriever = index.as_retriever(similarity_top_k=1) + + result_nodes = retriever.retrieve(turtle_test["question"]) + query_res = query_engine.query(turtle_test["question"]) + + # test they get data + assert result_nodes[0].metadata == turtle_test["metadata"] + assert query_res.source_nodes[0].text == turtle_test["text"] + + # test delete + vector_store.delete([doc.doc_id for doc in documents]) + res = redis_client.ft("llama_index").search("*") + assert len(res.docs) == 0 + + # test delete index + vector_store.delete_index() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/.gitignore b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/.gitignore new file mode 100644 index 0000000000000..990c18de22908 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/.gitignore @@ -0,0 +1,153 @@ +llama_index/_static +.DS_Store +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +bin/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +etc/ +include/ +lib/ +lib64/ +parts/ +sdist/ +share/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints +notebooks/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pyvenv.cfg + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Jetbrains +.idea +modules/ +*.swp + +# VsCode +.vscode + +# pipenv +Pipfile +Pipfile.lock + +# pyright +pyrightconfig.json diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/BUILD new file mode 100644 index 0000000000000..0896ca890d8bf --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/BUILD @@ -0,0 +1,3 @@ +poetry_requirements( + name="poetry", +) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/Makefile b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/Makefile new file mode 100644 index 0000000000000..b9eab05aa3706 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/Makefile @@ -0,0 +1,17 @@ +GIT_ROOT ?= $(shell git rev-parse --show-toplevel) + +help: ## Show all Makefile targets. + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}' + +format: ## Run code autoformatters (black). + pre-commit install + git ls-files | xargs pre-commit run black --files + +lint: ## Run linters: pre-commit (black, ruff, codespell) and mypy + pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files + +test: ## Run tests via pytest. + pytest tests + +watch-docs: ## Build and watch documentation. 
+ sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/README.md b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/README.md new file mode 100644 index 0000000000000..09a618ba819c9 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/README.md @@ -0,0 +1,133 @@ +# LlamaIndex Vector_Stores Integration: Tablestore + +> [Tablestore](https://www.aliyun.com/product/ots) is a fully managed NoSQL cloud database service that enables storage of a massive amount of structured +> and semi-structured data. + +This page shows how to use functionality related to the `Tablestore` vector database. + +To use Tablestore, you must first create an instance. +Here are the [instance creation instructions](https://help.aliyun.com/zh/tablestore/getting-started/manage-the-wide-column-model-in-the-tablestore-console). + +## Example + +```shell +pip install llama-index-vector-stores-tablestore +``` + +```python +import os + +import tablestore +from llama_index.core import MockEmbedding +from llama_index.core.schema import TextNode +from llama_index.core.vector_stores import ( + VectorStoreQuery, + MetadataFilters, + MetadataFilter, + FilterCondition, + FilterOperator, +) + +from llama_index.vector_stores.tablestore import TablestoreVectorStore + +# 1. create tablestore vector store +test_dimension_size = 4 +store = TablestoreVectorStore( + endpoint=os.getenv("end_point"), + instance_name=os.getenv("instance_name"), + access_key_id=os.getenv("access_key_id"), + access_key_secret=os.getenv("access_key_secret"), + vector_dimension=test_dimension_size, + vector_metric_type=tablestore.VectorMetricType.VM_COSINE, + # metadata mapping is used to filter non-vector fields. + metadata_mappings=[ + tablestore.FieldSchema( + "type", + tablestore.FieldType.KEYWORD, + index=True, + enable_sort_and_agg=True, + ), + tablestore.FieldSchema( + "time", + tablestore.FieldType.LONG, + index=True, + enable_sort_and_agg=True, + ), + ], +) + +# 2. create table and index +store.create_table_if_not_exist() +store.create_search_index_if_not_exist() + +# 3. create a mock embedding for testing +embedder = MockEmbedding(test_dimension_size) + +# 4. prepare some docs +movies = [ + TextNode( + id_="1", + text="hello world", + metadata={"type": "a", "time": 1995}, + ), + TextNode( + id_="2", + text="a b c", + metadata={"type": "a", "time": 1990}, + ), + TextNode( + id_="3", + text="sky cloud table", + metadata={"type": "a", "time": 2009}, + ), + TextNode( + id_="4", + text="dog cat", + metadata={"type": "a", "time": 2023}, + ), + TextNode( + id_="5", + text="computer python java", + metadata={"type": "b", "time": 2018}, + ), + TextNode( + id_="6", + text="java python js nodejs", + metadata={"type": "c", "time": 2010}, + ), + TextNode( + id_="7", + text="sdk golang python", + metadata={"type": "a", "time": 2023}, + ), +] +for movie in movies: + movie.embedding = embedder.get_text_embedding(movie.text) + +# 5. write some docs +ids = store.add(movies) +assert len(ids) == 7 + +# 6. delete docs +store.delete(ids[0]) + +# 7.
query with filters +query_result = store.query( + query=VectorStoreQuery( + query_embedding=embedder.get_text_embedding("nature fight physical"), + similarity_top_k=5, + filters=MetadataFilters( + filters=[ + MetadataFilter( + key="type", value="a", operator=FilterOperator.EQ + ), + MetadataFilter( + key="time", value=2020, operator=FilterOperator.LTE + ), + ], + condition=FilterCondition.AND, + ), + ), +) +print(query_result) +``` diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/BUILD new file mode 100644 index 0000000000000..db46e8d6c978c --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/BUILD @@ -0,0 +1 @@ +python_sources() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/__init__.py new file mode 100644 index 0000000000000..8d50119edf87d --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/__init__.py @@ -0,0 +1,4 @@ +from llama_index.vector_stores.tablestore.base import TablestoreVectorStore + + +__all__ = ["TablestoreVectorStore"] diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/base.py new file mode 100644 index 0000000000000..e8eac6f35d030 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/llama_index/vector_stores/tablestore/base.py @@ -0,0 +1,714 @@ +"""Tablestore vector store.""" + +import json +import traceback +from logging import getLogger +from typing import Any, List, Optional, Dict + +import tablestore +from llama_index.core.bridge.pydantic import PrivateAttr +from llama_index.core.schema import BaseNode, TextNode +from llama_index.core.vector_stores.types import ( + BasePydanticVectorStore, + MetadataFilters, + VectorStoreQuery, + VectorStoreQueryResult, + FilterCondition, + MetadataFilter, + FilterOperator, +) + + +class TablestoreVectorStore(BasePydanticVectorStore): + """`Tablestore` vector store. + + To use, you should have the ``tablestore`` python package installed. + + Examples: + ```python + import tablestore + import os + + store = TablestoreVectorStore( + endpoint=os.getenv("end_point"), + instance_name=os.getenv("instance_name"), + access_key_id=os.getenv("access_key_id"), + access_key_secret=os.getenv("access_key_secret"), + vector_dimension=512, + vector_metric_type=tablestore.VectorMetricType.VM_COSINE, + # metadata mapping is used to filter non-vector fields. 
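+ # For example, the "type" and "time" fields declared below can then be + # targeted with MetadataFilter(key="type", ...) inside a query's MetadataFilters.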
+ metadata_mappings=[ + tablestore.FieldSchema( + "type", + tablestore.FieldType.KEYWORD, + index=True, + enable_sort_and_agg=True, + ), + tablestore.FieldSchema( + "time", tablestore.FieldType.LONG, index=True, enable_sort_and_agg=True + ), + ], + ) + ``` + """ + + is_embedding_query: bool = True + stores_text: bool = True + _logger: Any = PrivateAttr(default=None) + _tablestore_client: tablestore.OTSClient = PrivateAttr(default=None) + _table_name: str = PrivateAttr(default="llama_index_vector_store_ots_v1") + _index_name: str = PrivateAttr(default="llama_index_vector_store_ots_index_v1") + _text_field: str = PrivateAttr(default="content") + _vector_field: str = PrivateAttr(default="embedding") + _ref_doc_id_field: str = PrivateAttr(default="ref_doc_id") + _metadata_mappings: List[tablestore.FieldSchema] = PrivateAttr(default=None) + + def __init__( + self, + tablestore_client: Optional[tablestore.OTSClient] = None, + endpoint: Optional[str] = None, + instance_name: Optional[str] = None, + access_key_id: Optional[str] = None, + access_key_secret: Optional[str] = None, + table_name: str = "llama_index_vector_store_ots_v1", + index_name: str = "llama_index_vector_store_ots_index_v1", + text_field: str = "content", + vector_field: str = "embedding", + ref_doc_id_field: str = "ref_doc_id", + vector_dimension: int = 512, + vector_metric_type: tablestore.VectorMetricType = tablestore.VectorMetricType.VM_COSINE, + metadata_mappings: Optional[List[tablestore.FieldSchema]] = None, + ) -> None: + super().__init__() + self._logger = getLogger(__name__) + if not tablestore_client: + self._tablestore_client = tablestore.OTSClient( + endpoint, + access_key_id, + access_key_secret, + instance_name, + retry_policy=tablestore.WriteRetryPolicy(), + ) + else: + self._tablestore_client = tablestore_client + self._table_name = table_name + self._index_name = index_name + self._text_field = text_field + self._vector_field = vector_field + self._ref_doc_id_field = ref_doc_id_field + + self._metadata_mappings = [ + tablestore.FieldSchema( + text_field, + tablestore.FieldType.TEXT, + index=True, + enable_sort_and_agg=False, + store=False, + analyzer=tablestore.AnalyzerType.MAXWORD, + ), + tablestore.FieldSchema( + ref_doc_id_field, + tablestore.FieldType.KEYWORD, + index=True, + enable_sort_and_agg=True, + store=False, + ), + tablestore.FieldSchema( + vector_field, + tablestore.FieldType.VECTOR, + vector_options=tablestore.VectorOptions( + data_type=tablestore.VectorDataType.VD_FLOAT_32, + dimension=vector_dimension, + metric_type=vector_metric_type, + ), + ), + ] + if metadata_mappings: + for mapping in metadata_mappings: + if ( + mapping.field_name == text_field + or mapping.field_name == vector_field + or mapping.field_name == ref_doc_id_field + ): + continue + self._metadata_mappings.append(mapping) + + def create_table_if_not_exist(self) -> None: + """Create table if not exist.""" + table_list = self._tablestore_client.list_table() + if self._table_name in table_list: + self._logger.info( + "Tablestore system table[%s] already exists", self._table_name + ) + return + self._logger.info( + "Tablestore system table[%s] does not exist, try to create the table.", + self._table_name, + ) + + schema_of_primary_key = [("id", "STRING")] + table_meta = tablestore.TableMeta(self._table_name, schema_of_primary_key) + table_options = tablestore.TableOptions() + reserved_throughput = tablestore.ReservedThroughput( + tablestore.CapacityUnit(0, 0) + ) + try: + self._tablestore_client.create_table( + table_meta, 
table_options, reserved_throughput + ) + self._logger.info( + "Tablestore create table[%s] successfully.", self._table_name + ) + except tablestore.OTSClientError as e: + traceback.print_exc() + self._logger.exception( + "Tablestore create system table[%s] failed with client error, http_status:%d, error_message:%s", + self._table_name, + e.get_http_status(), + e.get_error_message(), + ) + except tablestore.OTSServiceError as e: + traceback.print_exc() + self._logger.exception( + "Tablestore create system table[%s] failed with service error, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + self._table_name, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + + def create_search_index_if_not_exist(self) -> None: + """Create search index if not exist.""" + search_index_list = self._tablestore_client.list_search_index( + table_name=self._table_name + ) + if self._index_name in [t[1] for t in search_index_list]: + self._logger.info( + "Tablestore system index[%s] already exists", self._index_name + ) + return + index_meta = tablestore.SearchIndexMeta(self._metadata_mappings) + self._tablestore_client.create_search_index( + self._table_name, self._index_name, index_meta + ) + self._logger.info( + "Tablestore create system index[%s] successfully.", self._index_name + ) + + def delete_table_if_exists(self): + """Delete table if exists.""" + search_index_list = self._tablestore_client.list_search_index( + table_name=self._table_name + ) + for resp_tuple in search_index_list: + self._tablestore_client.delete_search_index(resp_tuple[0], resp_tuple[1]) + self._logger.info( + "Tablestore delete index[%s] successfully.", resp_tuple[1] + ) + self._tablestore_client.delete_table(self._table_name) + self._logger.info( + "Tablestore delete system table[%s] successfully.", self._table_name + ) + + def delete_search_index(self, table_name, index_name) -> None: + self._tablestore_client.delete_search_index(table_name, index_name) + self._logger.info("Tablestore delete index[%s] successfully.", index_name) + + def _write_row( + self, + row_id: str, + content: str, + embedding_vector: List[float], + metadata: Dict[str, Any], + ) -> None: + primary_key = [("id", row_id)] + attribute_columns = [ + (self._text_field, content), + (self._vector_field, json.dumps(embedding_vector)), + ] + for k, v in metadata.items(): + item = (k, v) + attribute_columns.append(item) + row = tablestore.Row(primary_key, attribute_columns) + + try: + self._tablestore_client.put_row(self._table_name, row) + self._logger.debug( + "Tablestore put row successfully. id:%s, content:%s, meta_data:%s", + row_id, + content, + metadata, + ) + except tablestore.OTSClientError as e: + self._logger.exception( + "Tablestore put row failed with client error:%s, id:%s, content:%s, meta_data:%s", + e, + row_id, + content, + metadata, + ) + except tablestore.OTSServiceError as e: + self._logger.exception( + "Tablestore put row failed with service error:%s, id:%s, content:%s, meta_data:%s, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + e, + row_id, + content, + metadata, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + + def _delete_row(self, row_id: str) -> None: + primary_key = [("id", row_id)] + try: + self._tablestore_client.delete_row(self._table_name, primary_key, None) + self._logger.info("Tablestore delete row successfully.
id:%s", row_id) + except tablestore.OTSClientError as e: + self._logger.exception( + "Tablestore delete row failed with client error:%s, id:%s", e, row_id + ) + except tablestore.OTSServiceError as e: + self._logger.exception( + "Tablestore delete row failed with service error:%s, id:%s, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + e, + row_id, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + + def _delete_all(self) -> None: + inclusive_start_primary_key = [("id", tablestore.INF_MIN)] + exclusive_end_primary_key = [("id", tablestore.INF_MAX)] + total = 0 + try: + while True: + ( + consumed, + next_start_primary_key, + row_list, + next_token, + ) = self._tablestore_client.get_range( + self._table_name, + tablestore.Direction.FORWARD, + inclusive_start_primary_key, + exclusive_end_primary_key, + [], + 5000, + max_version=1, + ) + for row in row_list: + self._tablestore_client.delete_row( + self._table_name, row.primary_key, None + ) + total += 1 + if next_start_primary_key is not None: + inclusive_start_primary_key = next_start_primary_key + else: + break + except tablestore.OTSClientError as e: + self._logger.exception( + "Tablestore delete row failed with client error:%s", e + ) + except tablestore.OTSServiceError as e: + self._logger.exception( + "Tablestore delete row failed with service error:%s, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + e, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + self._logger.info("delete all rows count:%d", total) + + def _search( + self, query: VectorStoreQuery, knn_top_k: int + ) -> VectorStoreQueryResult: + filter_query = self._parse_filters(query.filters) + ots_query = tablestore.KnnVectorQuery( + field_name=self._vector_field, + top_k=knn_top_k, + float32_query_vector=query.query_embedding, + filter=filter_query, + ) + sort = tablestore.Sort( + sorters=[tablestore.ScoreSort(sort_order=tablestore.SortOrder.DESC)] + ) + search_query = tablestore.SearchQuery( + ots_query, limit=query.similarity_top_k, get_total_count=False, sort=sort + ) + try: + search_response = self._tablestore_client.search( + table_name=self._table_name, + index_name=self._index_name, + search_query=search_query, + columns_to_get=tablestore.ColumnsToGet( + return_type=tablestore.ColumnReturnType.ALL + ), + ) + self._logger.info( + "Tablestore search successfully.
request_id:%s", + search_response.request_id, + ) + return self._to_query_result(search_response) + except tablestore.OTSClientError as e: + self._logger.exception("Tablestore search failed with client error:%s", e) + except tablestore.OTSServiceError as e: + self._logger.exception( + "Tablestore search failed with service error:%s, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + e, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + + def _filter( + self, + filters: Optional[MetadataFilters] = None, + return_type: Optional[ + tablestore.ColumnReturnType + ] = tablestore.ColumnReturnType.ALL, + limit: Optional[int] = 100, + ) -> List: + if filters is None: + return [] + filter_query = self._parse_filters(filters) + search_query = tablestore.SearchQuery( + filter_query, limit=limit, get_total_count=False + ) + all_rows = [] + try: + # first round + search_response = self._tablestore_client.search( + table_name=self._table_name, + index_name=self._index_name, + search_query=search_query, + columns_to_get=tablestore.ColumnsToGet(return_type=return_type), + ) + all_rows.extend(search_response.rows) + # loop + while search_response.next_token: + search_query.next_token = search_response.next_token + search_response = self._tablestore_client.search( + table_name=self._table_name, + index_name=self._index_name, + search_query=search_query, + columns_to_get=tablestore.ColumnsToGet(return_type=return_type), + ) + all_rows.extend(search_response.rows) + return all_rows + except tablestore.OTSClientError as e: + self._logger.exception("Tablestore search failed with client error:%s", e) + except tablestore.OTSServiceError as e: + self._logger.exception( + "Tablestore search failed with service error:%s, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + e, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + + def _to_get_nodes_result(self, rows) -> List[TextNode]: + nodes = [] + for row in rows: + node_id = row[0][0][1] + meta_data = {} + text = None + embedding = None + for col in row[1]: + key = col[0] + val = col[1] + if key == self._text_field: + text = val + continue + if key == self._vector_field: + embedding = json.loads(val) + continue + meta_data[key] = val + node = TextNode( + id_=node_id, + text=text, + metadata=meta_data, + embedding=embedding, + ) + nodes.append(node) + return nodes + + def _get_row(self, row_id: str) -> Optional[TextNode]: + primary_key = [("id", row_id)] + try: + _, row, _ = self._tablestore_client.get_row( + self._table_name, primary_key, None, None, 1 + ) + self._logger.debug("Tablestore get row successfully.
id:%s", row_id) + if row is None: + return None + node_id = row.primary_key[0][1] + meta_data = {} + text = None + embedding = None + for col in row.attribute_columns: + key = col[0] + val = col[1] + if key == self._text_field: + text = val + continue + if key == self._vector_field: + embedding = json.loads(val) + continue + meta_data[key] = val + return TextNode( + id_=node_id, + text=text, + metadata=meta_data, + embedding=embedding, + ) + except tablestore.OTSClientError as e: + self._logger.exception( + "Tablestore get row failed with client error:%s, id:%s", e, row_id + ) + except tablestore.OTSServiceError as e: + self._logger.exception( + "Tablestore get row failed with service error:%s, " + "id:%s, http_status:%d, error_code:%s, error_message:%s, request_id:%s", + e, + row_id, + e.get_http_status(), + e.get_error_code(), + e.get_error_message(), + e.get_request_id(), + ) + + def _to_query_result(self, search_response) -> VectorStoreQueryResult: + nodes = [] + ids = [] + similarities = [] + for hit in search_response.search_hits: + row = hit.row + score = hit.score + node_id = row[0][0][1] + meta_data = {} + text = None + embedding = None + for col in row[1]: + key = col[0] + val = col[1] + if key == self._text_field: + text = val + continue + if key == self._vector_field: + embedding = json.loads(val) + continue + meta_data[key] = val + node = TextNode( + id_=node_id, + text=text, + metadata=meta_data, + embedding=embedding, + ) + ids.append(node_id) + nodes.append(node) + similarities.append(score) + return VectorStoreQueryResult(nodes=nodes, ids=ids, similarities=similarities) + + def _parse_filters_recursively( + self, filters: MetadataFilters + ) -> tablestore.BoolQuery: + """Parse (possibly nested) MetadataFilters to equivalent tablestore search expression.""" + bool_query = tablestore.BoolQuery( + must_queries=[], + must_not_queries=[], + filter_queries=[], + should_queries=[], + minimum_should_match=None, + ) + if filters.condition is FilterCondition.AND: + bool_clause = bool_query.must_queries + elif filters.condition is FilterCondition.OR: + bool_clause = bool_query.should_queries + else: + raise ValueError(f"Unsupported filter condition: {filters.condition}") + + for filter_item in filters.filters: + if isinstance(filter_item, MetadataFilter): + bool_clause.append(self._parse_filter(filter_item)) + elif isinstance(filter_item, MetadataFilters): + bool_clause.append(self._parse_filters_recursively(filter_item)) + else: + raise ValueError(f"Unsupported filter type: {type(filter_item)}") + + return bool_query + + def _parse_filters(self, filters: Optional[MetadataFilters]) -> tablestore.Query: + """Parse MetadataFilters to equivalent Tablestore search expression.""" + if filters is None: + return tablestore.MatchAllQuery() + return self._parse_filters_recursively(filters=filters) + + @staticmethod + def _parse_filter(filter_item: MetadataFilter) -> tablestore.Query: + key = filter_item.key + val = filter_item.value + op = filter_item.operator + + if op == FilterOperator.EQ: + return tablestore.TermQuery(field_name=key, column_value=val) + elif op == FilterOperator.GT: + return tablestore.RangeQuery( + field_name=key, range_from=val, include_lower=False + ) + elif op == FilterOperator.GTE: + return tablestore.RangeQuery( + field_name=key, range_from=val, include_lower=True + ) + elif op == FilterOperator.LT: + return tablestore.RangeQuery( + field_name=key, range_to=val, include_upper=False + ) + elif op == FilterOperator.LTE: + return tablestore.RangeQuery( + field_name=key,
range_to=val, include_upper=True + ) + elif op == FilterOperator.NE: + bq = tablestore.BoolQuery( + must_queries=[], + must_not_queries=[], + filter_queries=[], + should_queries=[], + minimum_should_match=None, + ) + bq.must_not_queries.append( + tablestore.TermQuery(field_name=key, column_value=val) + ) + return bq + elif op in [FilterOperator.IN, FilterOperator.ANY]: + return tablestore.TermsQuery(field_name=key, column_values=val) + elif op == FilterOperator.NIN: + bq = tablestore.BoolQuery( + must_queries=[], + must_not_queries=[], + filter_queries=[], + should_queries=[], + minimum_should_match=None, + ) + bq.must_not_queries.append( + tablestore.TermsQuery(field_name=key, column_values=val) + ) + return bq + elif op == FilterOperator.ALL: + bq = tablestore.BoolQuery( + must_queries=[], + must_not_queries=[], + filter_queries=[], + should_queries=[], + minimum_should_match=None, + ) + for val_item in val: + bq.must_queries.append( + tablestore.TermQuery(field_name=key, column_value=val_item) + ) + return bq + elif op == FilterOperator.TEXT_MATCH: + return tablestore.MatchQuery(field_name=key, text=val) + elif op == FilterOperator.CONTAINS: + return tablestore.WildcardQuery(field_name=key, value=f"*{val}*") + else: + raise ValueError(f"Unsupported filter operator: {filter_item.operator}") + + @property + def client(self) -> Any: + """Get client.""" + return self._tablestore_client + + def add(self, nodes: List[BaseNode], **kwargs: Any) -> List[str]: + """Add nodes to vector store.""" + if len(nodes) == 0: + return [] + ids = [] + for node in nodes: + self._write_row( + row_id=node.node_id, + content=node.text, + embedding_vector=node.get_embedding(), + metadata=node.metadata, + ) + ids.append(node.node_id) + return ids + + def delete_nodes( + self, + node_ids: Optional[List[str]] = None, + filters: Optional[MetadataFilters] = None, + **delete_kwargs: Any, + ) -> None: + """Delete nodes from vector store.""" + if node_ids is None and filters is None: + raise RuntimeError("node_ids and filters cannot be None at the same time.") + if node_ids is not None and filters is not None: + raise RuntimeError("node_ids and filters cannot be set at the same time.") + if filters is not None: + rows = self._filter( + filters=filters, return_type=tablestore.ColumnReturnType.NONE + ) + for row in rows: + self._delete_row(row[0][0][1]) + if node_ids is not None: + for node_id in node_ids: + self._delete_row(node_id) + + def get_nodes( + self, + node_ids: Optional[List[str]] = None, + filters: Optional[MetadataFilters] = None, + ) -> List[BaseNode]: + """Get nodes from vector store.""" + if node_ids is None and filters is None: + raise RuntimeError("node_ids and filters cannot be None at the same time.") + if node_ids is not None and filters is not None: + raise RuntimeError("node_ids and filters cannot be set at the same time.") + if filters is not None: + rows = self._filter( + filters=filters, return_type=tablestore.ColumnReturnType.ALL + ) + return self._to_get_nodes_result(rows) + if node_ids is not None: + nodes = [] + for node_id in node_ids: + nodes.append(self._get_row(node_id)) + return nodes + return [] + + def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None: + """Delete nodes with the given ref_doc_id.""" + rows = self._filter( + filters=MetadataFilters( + filters=[ + MetadataFilter( + key=self._ref_doc_id_field, + value=ref_doc_id, + operator=FilterOperator.EQ, + ), + ], + condition=FilterCondition.AND, + ), + return_type=tablestore.ColumnReturnType.NONE, + ) + for row in rows: +
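# Each matched row's primary key has the form [("id", <value>)], so + # row[0][0][1] extracts the node id to delete. +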
self._delete_row(row[0][0][1]) + + def clear(self) -> None: + """Clear all nodes from configured vector store.""" + self._delete_all() + + def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult: + """Query vector store.""" + knn_top_k = query.similarity_top_k + if "knn_top_k" in kwargs: + knn_top_k = kwargs["knn_top_k"] + return self._search(query=query, knn_top_k=knn_top_k) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/pyproject.toml new file mode 100644 index 0000000000000..9a268656d3eb7 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/pyproject.toml @@ -0,0 +1,54 @@ +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core"] + +[tool.codespell] +check-filenames = true +check-hidden = true +skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" + +[tool.llamahub] +contains_example = false +import_path = "llama_index.vector_stores.tablestore" + +[tool.llamahub.class_authors] +TablestoreVectorStore = "llama-index" + +[tool.mypy] +disallow_untyped_defs = true +exclude = ["_static", "build", "examples", "notebooks", "venv"] +ignore_missing_imports = true +python_version = "3.8" + +[tool.poetry] +authors = ["xunjian.sl "] +description = "llama-index vector_stores tablestore integration" +license = "MIT" +name = "llama-index-vector-stores-tablestore" +packages = [{include = "llama_index/"}] +readme = "README.md" +version = "0.1.0" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +llama-index-core = "^0.10.1" +tablestore = "^6.0.0" + +[tool.poetry.group.dev.dependencies] +black = {extras = ["jupyter"], version = "<=23.9.1,>=23.7.0"} +codespell = {extras = ["toml"], version = ">=v2.2.6"} +ipython = "8.10.0" +jupyter = "^1.0.0" +mypy = "0.991" +pre-commit = "3.2.0" +pylint = "2.15.10" +pytest = "7.2.1" +pytest-mock = "3.11.1" +ruff = "0.0.292" +tree-sitter-languages = "^1.8.0" +types-Deprecated = ">=0.1.0" +types-PyYAML = "^6.0.12.12" +types-protobuf = "^4.24.0.4" +types-redis = "4.5.5.0" +types-requests = "2.28.11.8" +types-setuptools = "67.1.0.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/BUILD new file mode 100644 index 0000000000000..dabf212d7e716 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/BUILD @@ -0,0 +1 @@ +python_tests() diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/test_vector_stores_tablestore.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/test_vector_stores_tablestore.py new file mode 100644 index 0000000000000..7e3abe2d70365 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tablestore/tests/test_vector_stores_tablestore.py @@ -0,0 +1,167 @@ +import os + +import pytest +import tablestore + +from llama_index.core import MockEmbedding +from llama_index.core.schema import TextNode +from llama_index.core.vector_stores.types import ( + BasePydanticVectorStore, + 
VectorStoreQuery, + MetadataFilters, + MetadataFilter, + FilterOperator, + FilterCondition, +) +from llama_index.vector_stores.tablestore import TablestoreVectorStore + + +def test_class(): + names_of_base_classes = [b.__name__ for b in TablestoreVectorStore.__mro__] + assert BasePydanticVectorStore.__name__ in names_of_base_classes + + +def test_tablestore() -> None: + """Test end to end construction and search.""" + end_point = os.getenv("end_point") + instance_name = os.getenv("instance_name") + access_key_id = os.getenv("access_key_id") + access_key_secret = os.getenv("access_key_secret") + if ( + end_point is None + or instance_name is None + or access_key_id is None + or access_key_secret is None + ): + pytest.skip( + "end_point is None or instance_name is None or " + "access_key_id is None or access_key_secret is None" + ) + + # 1. create tablestore vector store + test_dimension_size = 4 + ref_doc_id_field = "ref_doc_id" + store = TablestoreVectorStore( + endpoint=os.getenv("end_point"), + instance_name=os.getenv("instance_name"), + access_key_id=os.getenv("access_key_id"), + access_key_secret=os.getenv("access_key_secret"), + vector_dimension=test_dimension_size, + vector_metric_type=tablestore.VectorMetricType.VM_COSINE, + ref_doc_id_field=ref_doc_id_field, + # metadata mapping is used to filter non-vector fields. + metadata_mappings=[ + tablestore.FieldSchema( + "type", + tablestore.FieldType.KEYWORD, + index=True, + enable_sort_and_agg=True, + ), + tablestore.FieldSchema( + "time", tablestore.FieldType.LONG, index=True, enable_sort_and_agg=True + ), + ], + ) + + # 2. create table and index + store.create_table_if_not_exist() + store.create_search_index_if_not_exist() + + # 3. create a mock embedding for testing + embedder = MockEmbedding(test_dimension_size) + + # 4. prepare some docs + movies = [ + TextNode( + id_="1", + text="hello world", + metadata={"type": "a", "time": 1995, ref_doc_id_field: "1"}, + ), + TextNode( + id_="2", + text="a b c", + metadata={"type": "a", "time": 1990, ref_doc_id_field: "1"}, + ), + TextNode( + id_="3", + text="sky cloud table", + metadata={"type": "a", "time": 2009, ref_doc_id_field: "2"}, + ), + TextNode( + id_="4", + text="dog cat", + metadata={"type": "a", "time": 2023, ref_doc_id_field: "3"}, + ), + TextNode( + id_="5", + text="computer python java", + metadata={"type": "b", "time": 2018, ref_doc_id_field: "4"}, + ), + TextNode( + id_="6", + text="java python js nodejs", + metadata={"type": "c", "time": 2010, ref_doc_id_field: "5"}, + ), + TextNode( + id_="7", + text="sdk golang python", + metadata={"type": "a", "time": 2023, ref_doc_id_field: "6"}, + ), + ] + for movie in movies: + movie.embedding = embedder.get_text_embedding(movie.text) + + # 5. write some docs + ids = store.add(movies) + assert len(ids) == 7 + + nodes = store.get_nodes(["0", "1", "7", "8"]) + assert len(nodes) == 4 + assert nodes[0] is None + assert nodes[3] is None + + nodes = store.get_nodes( + filters=MetadataFilters( + filters=[ + MetadataFilter(key="time", value=2000, operator=FilterOperator.GTE), + ], + condition=FilterCondition.AND, + ) + ) + assert len(nodes) == 5 + + # 6. delete docs + store.delete_nodes(["0"]) + store.delete_nodes( + filters=MetadataFilters( + filters=[ + MetadataFilter(key="time", value=1990, operator=FilterOperator.GTE), + MetadataFilter(key="time", value=1995, operator=FilterOperator.LT), + ], + condition=FilterCondition.AND, + ) + ) + store.delete(ref_doc_id="1") + + # 7.
query with filters + query_embedding = embedder.get_text_embedding("nature fight physical") + # modify the embedding for the test + query_embedding[0] = 0.1 + query_result = store.query( + query=VectorStoreQuery( + query_embedding=query_embedding, + similarity_top_k=5, + filters=MetadataFilters( + filters=[ + MetadataFilter(key="type", value="a", operator=FilterOperator.EQ), + MetadataFilter(key="time", value=2020, operator=FilterOperator.LTE), + ], + condition=FilterCondition.AND, + ), + ), + ) + print(query_result) + assert query_result is not None + assert query_result.ids is not None + assert query_result.similarities is not None + assert query_result.nodes is not None diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/llama_index/vector_stores/tencentvectordb/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/llama_index/vector_stores/tencentvectordb/base.py index ffe736ef7bec8..10d525f2d16e4 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/llama_index/vector_stores/tencentvectordb/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/llama_index/vector_stores/tencentvectordb/base.py @@ -278,7 +278,7 @@ def _create_collection( collection_name: str = self._compute_collection_name( database_name, collection_params ) - collection_description = collection_params.collection_description + collection_description = collection_params._collection_description if collection_params is None: raise ValueError(VALUE_NONE_ERROR.format("collection_params")) @@ -300,9 +300,9 @@ def _compute_collection_name( database_name: str, collection_params: CollectionParams ) -> str: if database_name == DEFAULT_DATABASE_NAME: - return collection_params.collection_name - if collection_params.collection_name != DEFAULT_COLLECTION_NAME: - return collection_params.collection_name + return collection_params._collection_name + if collection_params._collection_name != DEFAULT_COLLECTION_NAME: + return collection_params._collection_name else: return database_name + "_" + DEFAULT_COLLECTION_NAME diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/pyproject.toml index 472ce49116b39..85aad2ff12658 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-tencentvectordb/pyproject.toml @@ -27,11 +27,12 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-vector-stores-tencentvectordb" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" llama-index-core = "^0.11.0" +tcvectordb = "^1.3.13" [tool.poetry.group.dev.dependencies] ipython = "8.10.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/base.py index 49dc067d26050..7e08c417cbd2a 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/base.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/base.py @@ -1,4 +1,5 @@ import asyncio +import json from logging import getLogger from
typing import Any, List, Dict, Optional @@ -18,6 +19,8 @@ from wordlift_client.exceptions import ApiException from wordlift_client.models import AccountInfo, NodeRequestMetadataValue +from .metadata_filters_to_filters import MetadataFiltersToFilters + log = getLogger(__name__) @@ -75,7 +78,12 @@ def __init__( fields: Optional[List[str]] = None, ): super().__init__(use_async=True) - nest_asyncio.apply() + + try: + nest_asyncio.apply() + except ValueError: + # We may not be in asyncio + pass if configuration is None: self._configuration = _make_configuration(key=key) @@ -211,17 +219,22 @@ def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResul async def aquery( self, query: VectorStoreQuery, **kwargs: Any ) -> VectorStoreQueryResult: + filters = MetadataFiltersToFilters.metadata_filters_to_filters( + query.filters if query.filters else [] + ) if query.query_str: request = VectorSearchQueryRequest( query_string=query.query_str, similarity_top_k=query.similarity_top_k, fields=self._fields, + filters=filters, ) else: request = VectorSearchQueryRequest( query_embedding=query.query_embedding, similarity_top_k=query.similarity_top_k, fields=self._fields, + filters=filters, ) async with wordlift_client.ApiClient(self._configuration) as api_client: @@ -232,19 +245,24 @@ async def aquery( vector_search_query_request=request, ) except ApiException as e: - log.error(f"Error querying for entities: {e}", exc_info=True) + log.error( + f"Error querying for entities with the following request: {json.dumps(api_client.sanitize_for_serialization(request))}", + exc_info=True, + ) nodes: List[TextNode] = [] similarities: List[float] = [] ids: List[str] = [] for item in page.items: - metadata = {**item.metadata, **item.fields} + metadata = item.metadata if item.metadata else {} + fields = item.fields if item.fields else {} + metadata = {**metadata, **fields} nodes.append( TextNode( - text=item.text, - id_=item.node_id, + text=item.text if item.text else "", + id_=item.node_id if item.node_id else "", embedding=(item.embeddings if "embeddings" in item else None), metadata=metadata, ) diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/metadata_filters_to_filters.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/metadata_filters_to_filters.py new file mode 100644 index 0000000000000..af5e41ed3425e --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/llama_index/vector_stores/wordlift/metadata_filters_to_filters.py @@ -0,0 +1,86 @@ +from llama_index.core.vector_stores import ( + MetadataFilters, + FilterOperator, + FilterCondition, +) +from wordlift_client import Filter, FilterValue + + +class MetadataFiltersToFilters: + @staticmethod + def metadata_filters_to_filters(metadata_filters: MetadataFilters): + # Return an empty list if there are no filters. + if ( + not hasattr(metadata_filters, "filters") + or len(metadata_filters.filters) == 0 + ): + return [] + + # Only one filter. + if len(metadata_filters.filters) == 1: + metadata_filter = metadata_filters.filters[0] + return [ + Filter( + key=metadata_filter.key, + operator=MetadataFiltersToFilters.metadata_filter_operator_to_filter_operator( + metadata_filter.operator + ), + value=FilterValue(metadata_filter.value), + ) + ] + + # Prepare the list of filters. 
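+ # Illustrative sketch (hypothetical values): MetadataFilters(condition=FilterCondition.AND, + # filters=[MetadataFilter(key="rdf:type", operator=FilterOperator.EQ, value="http://schema.org/Webpage"), ...]) + # maps to a single wrapper filter: [Filter(operator="AND", filters=[Filter(key="rdf:type", + # operator="EQ", value=FilterValue("http://schema.org/Webpage")), ...])].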
+ filters = [] + for metadata_filter in metadata_filters.filters: + filters.append( + Filter( + key=metadata_filter.key, + operator=MetadataFiltersToFilters.metadata_filter_operator_to_filter_operator( + metadata_filter.operator + ), + value=FilterValue(metadata_filter.value), + ) + ) + + # Join the filters based on the metadata filter condition. + return [ + Filter( + operator=MetadataFiltersToFilters.metadata_filter_condition_to_filter_operators( + metadata_filters.condition + ), + filters=filters, + ) + ] + + @staticmethod + def metadata_filter_operator_to_filter_operator(filter_operator: FilterOperator): + # 'EQ', 'GT', 'LT', 'NE', 'GTE', 'LTE', 'IN', 'NIN', 'AND', 'OR' + if filter_operator == FilterOperator.EQ: + return "EQ" # default operator (string, int, float) + elif filter_operator == FilterOperator.GT: + return "GT" # greater than (int, float) + elif filter_operator == FilterOperator.LT: + return "LT" # less than (int, float) + elif filter_operator == FilterOperator.NE: + return "NE" # not equal to (string, int, float) + elif filter_operator == FilterOperator.GTE: + return "GTE" # greater than or equal to (int, float) + elif filter_operator == FilterOperator.LTE: + return "LTE" # less than or equal to (int, float) + elif filter_operator == FilterOperator.IN: + return "IN" # In array (string or number) + elif filter_operator == FilterOperator.NIN: + return "NIN" # Not in array (string or number) + else: + raise ValueError(f"Invalid filter operator: {filter_operator}") + + @staticmethod + def metadata_filter_condition_to_filter_operators( + filter_condition: FilterCondition, + ): + if filter_condition == FilterCondition.AND: + return "AND" + elif filter_condition == FilterCondition.OR: + return "OR" + else: + raise ValueError(f"Invalid filter condition: {filter_condition}") diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/pyproject.toml index 7be7eaf7c73f4..73244fc777b8b 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/pyproject.toml +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/pyproject.toml @@ -27,7 +27,7 @@ license = "MIT" name = "llama-index-vector-stores-wordlift" packages = [{include = "llama_index/"}] readme = "README.md" -version = "0.4.3" +version = "0.4.5" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" @@ -36,7 +36,7 @@ aiohttp = ">=3.7.4" python-dateutil = ">=2.8.2" aiohttp-retry = ">=1.2" urllib3 = ">=1.21.1,<3" -wordlift-client = ">=1.42.0,<2" +wordlift-client = ">=1.46.0,<2" docker = "^7.1.0" llama-index-core = "^0.11.0" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/test_wordlift.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/test_wordlift.py index b1a7860fa1358..5e2aceada507f 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/test_wordlift.py +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/test_wordlift.py @@ -6,12 +6,18 @@ import requests from docker.models.containers import Container from llama_index.core.schema import NodeRelationship, RelatedNodeInfo, TextNode -from llama_index.core.vector_stores import VectorStoreQuery -from llama_index.vector_stores.wordlift import WordliftVectorStore +from llama_index.core.vector_stores import ( + VectorStoreQuery, + MetadataFilters, + MetadataFilter, +
FilterOperator, +) from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry from wordlift_client import Configuration +from llama_index.vector_stores.wordlift import WordliftVectorStore + try: # Should be installed as pyvespa-dependency import docker @@ -910,7 +916,10 @@ def configuration(wiremock_server, random_port: int) -> Configuration: @pytest.fixture(scope="session") def vector_store(configuration: Configuration) -> WordliftVectorStore: - return WordliftVectorStore(configuration=configuration) + return WordliftVectorStore( + configuration=configuration, + fields=["schema:url", "schema:headline", "schema:text"], + ) @pytest.mark.skipif(not docker_available, reason="Docker not available") @@ -981,3 +990,44 @@ async def test_add_to_wordlift_and_query( assert res.nodes assert res.nodes[0].get_content() == "lorem ipsum" + + +@pytest.mark.skipif(not docker_available, reason="Docker not available") +@pytest.mark.asyncio() +@pytest.mark.parametrize("use_async", [True, False]) +async def test_add_to_wordlift_and_query_with_filters( + vector_store: WordliftVectorStore, + node_embeddings: List[TextNode], + use_async: bool, +) -> None: + filters = MetadataFilters( + filters=[ + MetadataFilter( + key="rdf:type", + operator=FilterOperator.EQ, + value="http://schema.org/Webpage", + ) + ] + ) + + if use_async: + await vector_store.async_add(node_embeddings) + res = await vector_store.aquery( + VectorStoreQuery( + query_embedding=node_embeddings[0].embedding, + similarity_top_k=1, + filters=filters, + ) + ) + else: + vector_store.add(node_embeddings) + res = vector_store.query( + VectorStoreQuery( + query_embedding=node_embeddings[0].embedding, + similarity_top_k=1, + filters=filters, + ) + ) + + assert res.nodes + assert res.nodes[0].get_content() == "lorem ipsum" diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_1.json b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_1.json index 743d11622e042..72ca5b6bfceba 100644 --- a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_1.json +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_1.json @@ -12,7 +12,277 @@ "Content-Type": { "equalTo": "application/json" } - } + }, + "bodyPatterns": [ + { + "equalToJson": { + "fields": ["schema:url", "schema:headline", "schema:text"], + "filters": [], + "query_embedding": [ + -0.02409953996539116, 0.007545656058937311, -0.010837538167834282, + -0.015247111208736897, 0.020510902628302574, 0.02260207198560238, + 0.03624352440237999, -0.02312600426375866, 0.03509214147925377, + 0.009039043448865414, -0.04192536696791649, 0.0646052435040474, + 0.05423989146947861, -0.006866314448416233, 0.032361097633838654, + -0.003253199392929673, 0.05170004814863205, -0.054772719740867615, + 0.008567743934690952, -0.008800626732409, -0.035540562123060226, + -0.0036080495920032263, -0.012353642843663692, + -0.049400247633457184, 0.0490705706179142, -0.017110642045736313, + 0.0434158556163311, 0.0047591072507202625, -0.04901169613003731, + 0.04313654080033302, 0.08862752467393875, -0.0305459126830101, + 0.031628116965293884, -0.0023637129925191402, + -0.0062087783589959145, 0.016893181949853897, 0.02979143150150776, + 0.016508951783180237, 0.02346094138920307, 
0.0370299331843853, + -0.022708870470523834, -0.040380027145147324, 0.027732161805033684, + -0.03321043401956558, 0.028203414753079414, -0.03547737002372742, + -0.003607464488595724, -0.011886986903846264, 0.08876273781061172, + -0.035582806915044785, 0.01728527806699276, 0.033559445291757584, + 0.03006189689040184, -0.02276725322008133, -0.02573985420167446, + 0.018779633566737175, 0.018232546746730804, 0.04365724325180054, + 0.019853554666042328, -0.056149423122406006, -0.040290556848049164, + 0.055537160485982895, 0.01000349409878254, 0.042989231646060944, + 0.06469859182834625, -0.07268784195184708, 0.029832176864147186, + -0.00694637605920434, -0.019039496779441833, -0.028887171298265457, + 0.0611429437994957, -0.07661744952201843, 0.008372906595468521, + 0.07736742496490479, 0.01697900891304016, 0.00455197598785162, + -0.01660764403641224, 0.013384402729570866, 0.016262754797935486, + 0.04430804401636124, 0.05316875874996185, -0.017527703195810318, + -0.015531433746218681, 0.02427724562585354, 0.06978478282690048, + -0.0215159859508276, -0.0015113558620214462, -0.0005745171220041811, + -0.058096420019865036, 0.06534607708454132, 0.09807957708835602, + 0.03466407209634781, 0.02070295810699463, 0.06667006760835648, + 0.008454645052552223, 0.004790779203176498, -0.0021362053230404854, + 0.012438371777534485, -0.025263242423534393, 0.04503288492560387, + -0.014884737320244312, -0.009326054714620113, -0.03726727515459061, + -0.003038907190784812, 0.045574598014354706, 0.02176540531218052, + -0.022943837568163872, 0.03223365545272827, -0.0231216698884964, + -0.008260810747742653, -0.0838012844324112, -0.0041016764007508755, + -0.019858187064528465, -0.025245968252420425, -0.08231323212385178, + -0.0008271003025583923, 0.06744734942913055, -0.022615060210227966, + -0.036366865038871765, -0.007173497695475817, 0.023905375972390175, + -0.026212558150291443, 0.008525801822543144, 0.05790192261338234, + 0.03967795893549919, -0.019248196855187416, -0.03145810589194298, + -0.06143287941813469, -0.025088703259825706, 0.008470161817967892, + -0.019712727516889572, 0.0201752707362175, -0.024380167946219444, + -0.048689160495996475, 0.030698394402861595, 0.06519214808940887, + 0.006988975685089827, -0.030256111174821854, 0.015067224390804768, + -0.009249436669051647, 0.005610473453998566, 0.030035214498639107, + 0.0062820687890052795, -0.01272163912653923, 0.0031057705637067556, + -0.08999253064393997, 0.03721488639712334, -0.028286410495638847, + -0.041241131722927094, -0.011454920284450054, -0.024434737861156464, + 0.012364787049591541, 0.01152063999325037, 0.013034306466579437, + -0.015703829005360603, -0.07446613907814026, -0.035228293389081955, + 0.029677776619791985, -0.0008637132123112679, 0.03882776200771332, + -0.03331062197685242, 0.06260097771883011, -0.03643746301531792, + 0.03024205192923546, -0.04027588292956352, -0.006302598863840103, + -0.03876963630318642, 0.022751253098249435, 0.05228758603334427, + -0.00650898227468133, 0.00784189160913229, -0.02789650298655033, + -0.03584962338209152, -0.03039231151342392, 0.005157549399882555, + 0.038540806621313095, 0.016989191994071007, 0.002693451475352049, + 0.03680325672030449, -0.02772810310125351, 0.04701118916273117, + 0.00428611459210515, 0.008383968845009804, 0.01370905339717865, + -0.03272991627454758, 0.025361433625221252, 0.07640355080366135, + -0.09241847693920135, -0.018185008317232132, -0.0033845037687569857, + 0.016245491802692413, -0.010160512290894985, -0.01563146524131298, + -0.05825433135032654, 0.01485800463706255, 
-0.012620965018868446, + 0.012889757752418518, -0.07174727320671082, 0.0036001859698444605, + 0.0003497107536531985, -0.010015580803155899, -0.026276255026459694, + -0.054649949073791504, 0.0178300142288208, -0.03058459609746933, + -0.0046766879968345165, 0.02828553318977356, 0.004914792720228434, + 0.006221550516784191, 0.02017192542552948, 0.06884320825338364, + -0.005684717558324337, -0.03233734890818596, 0.028631558641791344, + -0.037736676633358, 0.012628731317818165, 0.011251229792833328, + -0.01216588169336319, -0.0449829027056694, 0.05112060159444809, + 0.0321006216108799, 0.004842250142246485, -0.028930027037858963, + -0.03781994432210922, 0.027888713404536247, -0.0020639710128307343, + -0.012764016166329384, 0.001849374850280583, -0.06090773642063141, + 0.024058688431978226, -0.07892243564128876, -0.022651061415672302, + 0.03663799911737442, -0.015678945928812027, 0.034871794283390045, + 0.02377290651202202, 0.01880395971238613, 0.005291060544550419, + 0.06636939197778702, 0.07682432979345322, 0.06366447359323502, + 0.045753464102745056, -0.021396778523921967, -0.06412482261657715, + -0.022975077852606773, -0.015396900475025177, -0.01382171455770731, + 0.04801446571946144, -0.044695984572172165, 0.010728792287409306, + -0.013164067640900612, -0.06426115334033966, 0.016030244529247284, + 0.01712176389992237, 0.003342918585985899, -0.024401094764471054, + -0.04742956534028053, 0.05057492479681969, -0.06447580456733704, + 0.003736401442438364, -0.0038414313457906246, -0.044975876808166504, + 0.01563481241464615, -0.0513794831931591, 0.03116164542734623, + 0.05475221946835518, -0.018825948238372803, -0.05825172737240791, + 0.026944046840071678, -0.01748691312968731, -0.015295912511646748, + 0.041276704519987106, 0.06435948610305786, 0.057869717478752136, + 0.007085255812853575, -0.0016246287850663066, -0.049611661583185196, + -0.02173301763832569, -0.03379219397902489, -0.010483022779226303, + -0.04382925480604172, -0.019825980067253113, -0.028733380138874054, + 0.00799755472689867, -0.010613085702061653, 0.03142009675502777, + 0.0993402749300003, 0.013524055480957031, 0.04996125027537346, + -0.05237795040011406, 0.00459002610296011, 0.03747300058603287, + -0.007390610408037901, 0.03675704449415207, 0.028937028720974922, + 0.0490734800696373, 0.07018966227769852, 0.02625582180917263, + -0.012768297456204891, -0.017309138551354408, 0.009925411082804203, + 0.002874719677492976, -0.03868229687213898, -0.011665035039186478, + 0.016460483893752098, -0.016710275784134865, 0.026092402637004852, + -0.029528427869081497, 0.027799755334854126, 0.00869158748537302, + 0.00814155675470829, 0.025196386501193047, 0.010806374251842499, + 0.007144266739487648, 0.009723981842398643, 0.040421262383461, + -0.014507091604173183, 0.00026819604681804776, 0.04148963838815689, + -0.011153719387948513, 0.011801899410784245, -0.04976220428943634, + 0.01822030358016491, -0.022088022902607918, 0.010638813488185406, + 0.050233982503414154, 0.03569936379790306, 0.024639304727315903, + 0.04990961775183678, 0.006694141309708357, 0.03368678316473961, + 0.05511782318353653, 0.006736745126545429, -0.03352257236838341, + 0.06660161167383194, 0.012697044759988785, -0.0414629764854908, + 0.00029493123292922974, -0.010526367463171482, + -0.015528022311627865, -0.024239003658294678, 0.014093346893787384, + -0.05118495970964432, -0.04236882925033569, 0.026792580261826515, + -0.04413650929927826, -0.008981052786111832, 0.02730277180671692, + 0.06500429660081863, 0.011842386797070503, -0.005152833182364702, + 
-0.011248798109591007, 0.0014251028187572956, -0.058844491839408875, + -0.03179366886615753, 0.014862962067127228, 0.0031557243783026934, + 0.006256013177335262, 0.022348832339048386, 0.018381774425506592, + 0.05182422697544098, -0.007802502252161503, 0.027966707944869995, + -0.015043427236378193, 0.06696246564388275, 0.06933777779340744, + 0.06229571998119354, 0.025468071922659874, 0.04187721014022827, + 0.030144240707159042, -0.04005308449268341, 0.018866809085011482, + -0.011857784353196621, 0.028774423524737358, 0.0008001378737390041, + 0.051755640655756, -0.0763072744011879, -0.035968881100416183, + -0.06542054563760757, -0.01490574050694704, 0.03574415668845177, + 0.008126146160066128, 0.02864741161465645, -3.158819890813902e-5, + 0.022258171811699867, 0.009770577773451805, -0.044916730374097824, + -0.07808850705623627, 0.0390549972653389, -0.01499694213271141, + 0.007959951646625996, -0.005930764134973288, -0.02100251242518425, + -0.029872389510273933, -0.012773459777235985, -0.043038900941610336, + 0.04595667123794556, -0.06141292303800583, 0.03035324439406395, + 0.00792359933257103, -0.020943107083439827, 0.02207357995212078, + 0.011123895645141602, 0.03734105825424194, -0.010610148310661316, + -0.052318185567855835, -0.015035799704492092, -0.025462502613663673, + 0.02562587894499302, -0.003998974338173866, 0.009129341691732407, + 0.07043135166168213, -0.004517677705734968, -0.08051703125238419, + 0.0014796281466260552, 0.05828672647476196, 0.02732127346098423, + -0.03499415144324303, -0.031034745275974274, -0.005702692084014416, + -0.040223129093647, -0.004799958318471909, 0.018715567886829376, + 0.04384651035070419, 0.013568849302828312, -0.011443322524428368, + 0.005295567214488983, 0.026522139087319374, 0.002658095210790634, + -0.03300406038761139, 0.018211429938673973, -0.013894042000174522, + 0.013989650644361973, 0.031047020107507706, -0.008311141282320023, + 0.01590598002076149, 0.060290511697530746, 0.01576240547001362, + 0.02187625877559185, 0.043800316751003265, 0.031320903450250626, + -0.06651055067777634, -0.021002093330025673, 0.0017026244895532727, + 0.043769653886556625, 0.10567088425159454, 0.05447397008538246, + 0.03038533218204975, -0.05251486599445343, 0.006278094835579395, + 0.036366380751132965, -0.04047612473368645, 0.028844239190220833, + 0.013570988550782204, 0.09016293287277222, -0.004787171259522438, + 0.01950586773455143, -0.05719347298145294, 0.061203483492136, + 0.01365590002387762, -0.014267620630562305, -0.024959318339824677, + -0.017525024712085724, 0.019045768305659294, 0.015455638989806175, + 0.018744226545095444, -0.012427862733602524, 0.02276451513171196, + 0.007119886577129364, 0.026162952184677124, 0.012351160869002342, + -0.015322038903832436, 0.030794212594628334, 0.027505625039339066, + -0.08122427016496658, -0.056815020740032196, 0.028780920431017876, + -0.05804600194096565, 0.029232045635581017, 0.09229617565870285, + 0.012091382406651974, -0.0043792868964374065, -0.059065081179142, + -0.08049105107784271, 0.010014526546001434, -0.00943391490727663, + -0.021799413487315178, 0.04687872156500816, -0.02472817152738571, + -0.05067834630608559, -0.052253175526857376, 0.008715991862118244, + -0.016799505800008774, -0.04549355432391167, -0.042793650180101395, + -0.011589781381189823, 0.02598593384027481, 0.02300962619483471, + 0.08437345176935196, -0.005691266618669033, -0.02574770711362362, + 0.03002670407295227, 0.004122992046177387, -0.00882661622017622, + 0.01909799687564373, 0.01614842191338539, 0.03799080476164818, + -0.04371938109397888, 
-0.007932179607450962, 0.007009848486632109, + 0.016692185774445534, 0.0018943115137517452, 0.04915632680058479, + 0.0459667444229126, 0.0007698669796809554, -0.0378001406788826, + 0.06169285625219345, 0.006474153138697147, -0.02551594004034996, + 0.029698481783270836, 0.017282065004110336, 0.009382328949868679, + 0.01234542764723301, 0.0458146370947361, -0.05962980166077614, + 0.053857170045375824, 0.018979759886860847, -0.06830580532550812, + 0.047222405672073364, 0.004947094712406397, 0.006394018419086933, + 0.06749635934829712, -0.004976056516170502, -0.044039297848939896, + 0.005775265861302614, -0.021700462326407433, -0.04911866411566734, + 0.029284657910466194, 0.0057007912546396255, -0.05791183561086655, + -0.036271147429943085, 0.027215855196118355, 0.000328205554978922, + 0.011796397157013416, 0.014365887269377708, -0.021071504801511765, + -0.10476838052272797, 0.03931521624326706, -0.01743827760219574, + -0.06444519758224487, 0.06594052165746689, -0.013583669438958168, + 0.023327721282839775, -0.040980175137519836, 0.021110322326421738, + -0.023542361333966255, 0.009244954213500023, -0.027327876538038254, + -0.0749981552362442, -0.05817040055990219, 0.033675868064165115, + -0.006241938564926386, 0.015267791226506233, -0.005319511517882347, + 0.007725033443421125, -0.04769749939441681, -0.013632228597998619, + -0.01671481318771839, 0.032588712871074677, 0.004994310438632965, + 0.009327513165771961, -0.014502864331007004, -0.021513426676392555, + -0.041920777410268784, 0.000736730988137424, -0.04845338687300682, + 0.05143485218286514, -0.00827883929014206, -0.05451349914073944, + -0.018751125782728195, -0.028143448755145073, -0.027438942342996597, + 0.0018979490268975496, 0.0029770773835480213, -0.052255671471357346, + 0.008816778659820557, -0.04883852228522301, -0.005153963807970285, + 0.03402094915509224, -0.033734340220689774, 0.0250434298068285, + -0.020967962220311165, -0.006132633425295353, -0.07388681918382645, + -0.03740931302309036, 0.025875000283122063, 0.023692401126027107, + 0.00612619100138545, -0.022321289405226707, -0.035878535360097885, + -0.028265153989195824, 0.05367949604988098, 0.011247171089053154, + -0.020050087943673134, 0.07264093309640884, 0.06775902211666107, + -0.00279391766525805, 0.0012111783726140857, 3.3324729884043336e-5, + -0.008147984743118286, -0.023408658802509308, + -0.0009141457267105579, -0.0390138104557991, -0.004014919511973858, + -0.037028394639492035, 0.027383441105484962, 0.06490057706832886, + -4.5131921069696546e-5, -0.039642851799726486, + -0.028655976057052612, -0.01269892230629921, -0.020463932305574417, + 0.009898904711008072, -0.006316003855317831, -0.049614571034908295, + -0.0066841524094343185, -0.026518603786826134, 0.008593079634010792, + -0.003961550071835518, 0.03302085027098656, -0.022351538762450218, + -0.036156121641397476, -0.044611334800720215, 0.021284740418195724, + -0.043991588056087494, 0.003917429596185684, -0.03190663829445839, + -0.0045702336356043816, -0.018654555082321167, 0.011742550879716873, + 0.05492732673883438, -0.02513030730187893, -0.038381800055503845, + 0.00968407653272152, 0.01182209700345993, 0.04846900328993797, + -0.028035493567585945, 0.026404736563563347, 0.041231412440538406, + -0.026241928339004517, 0.07928590476512909, 0.061520665884017944, + -0.021520253270864487, 0.05087057501077652, 0.06552571803331375, + 0.030050503090023994, -0.002204337390139699, -0.023577604442834854, + -0.0306601133197546, -0.04351067543029785, -0.005328315310180187, + -0.01005119550973177, -0.02883738838136196, 
-0.017970634624361992, + -0.034061819314956665, -0.09152771532535553, -0.012920234352350235, + -0.054658979177474976, 0.006490966305136681, -0.005702045746147633, + 0.013751029968261719, -0.015532087534666061, -0.04882064089179039, + -0.06106610968708992, -0.056693095713853836, 0.01626153290271759, + 0.002411112654954195, 0.04952302202582359, -0.056874003261327744, + 0.008347931317985058, -0.0002814563049469143, 0.05288821458816528, + 0.03517848998308182, -0.04989904537796974, -0.04409780725836754, + 0.008727666921913624, -0.019816864281892776, -0.044541459530591965, + 0.050811536610126495, -0.016411324962973595, -0.01473113615065813, + -0.0033838164526969194, -0.03577446565032005, -0.07643205672502518, + 0.01180155761539936, -0.0024393019266426563, -0.056923456490039825, + 0.024946225807070732, -0.03063299134373665, 0.05805474892258644, + -0.026666324585676193, 0.03865901008248329, 0.010353310965001583, + 0.03435457870364189, -0.03234299272298813, 0.015178441070020199, + 0.008014033548533916, 0.03488391637802124, -0.02252863347530365, + 0.0029658775310963392, -0.08343874663114548, -0.04188064485788345, + 0.006045202724635601, 0.02174469269812107, -0.010302801616489887, + 0.00877392664551735, 0.02698654495179653, 0.011882752180099487, + -0.03585568815469742, 0.02313760109245777, 0.03410400450229645, + -0.02344358153641224, -0.06733264774084091, 0.013219987042248249, + -0.03404899314045906, -0.04966249316930771, -0.014130531810224056, + -0.037043944001197815, 0.030784595757722855, -0.051190052181482315, + -0.01868394762277603, -0.017007501795887947, -0.0715363398194313, + 0.03724869713187218, -0.02842004969716072, 0.040288373827934265, + -0.007283140439540148, 0.00501842750236392, -0.05653573200106621, + 0.012398875318467617, -0.00045440212124958634, -0.1044396162033081, + -0.040580108761787415, 0.019638027995824814, -0.038515642285346985, + -0.07116620987653732, -0.0394931435585022, 0.029516225680708885, + 0.0009498042636550963, 0.04369128495454788, 0.003504744963720441, + 0.02860836312174797, -0.015155916102230549, -0.021670715883374214, + 0.0023334589786827564, 0.044438689947128296, 0.023793401196599007, + 0.039728522300720215, 0.07858478277921677, 0.01348444726318121, + 0.030168594792485237, -0.01906987838447094, 0.04200403019785881, + -0.007454268634319305, 0.017389994114637375, -0.018342411145567894, + -0.006242978852242231, 0.0008583567687310278 + ], + "similarity_top_k": 1 + }, + "ignoreArrayOrder": true + } + ] }, "response": { "status": 200, diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_2.json b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_2.json new file mode 100644 index 0000000000000..2006f51570175 --- /dev/null +++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-wordlift/tests/wiremock/mappings/vector-search/queries/request_2.json @@ -0,0 +1,300 @@ +{ + "request": { + "method": "POST", + "url": "/vector-search/queries", + "headers": { + "Accept": { + "equalTo": "application/json" + }, + "Authorization": { + "equalTo": "Key key43245932904328493223" + }, + "Content-Type": { + "equalTo": "application/json" + } + }, + "bodyPatterns": [ + { + "equalToJson": { + "fields": ["schema:url", "schema:headline", "schema:text"], + "filters": [ + { + "key": "rdf:type", + "operator": "EQ", + "value": "http://schema.org/Webpage" + } + ], + "query_embedding": [ + -0.02409953996539116, 
0.007545656058937311, -0.010837538167834282, + -0.015247111208736897, 0.020510902628302574, 0.02260207198560238, + 0.03624352440237999, -0.02312600426375866, 0.03509214147925377, + 0.009039043448865414, -0.04192536696791649, 0.0646052435040474, + 0.05423989146947861, -0.006866314448416233, 0.032361097633838654, + -0.003253199392929673, 0.05170004814863205, -0.054772719740867615, + 0.008567743934690952, -0.008800626732409, -0.035540562123060226, + -0.0036080495920032263, -0.012353642843663692, + -0.049400247633457184, 0.0490705706179142, -0.017110642045736313, + 0.0434158556163311, 0.0047591072507202625, -0.04901169613003731, + 0.04313654080033302, 0.08862752467393875, -0.0305459126830101, + 0.031628116965293884, -0.0023637129925191402, + -0.0062087783589959145, 0.016893181949853897, 0.02979143150150776, + 0.016508951783180237, 0.02346094138920307, 0.0370299331843853, + -0.022708870470523834, -0.040380027145147324, 0.027732161805033684, + -0.03321043401956558, 0.028203414753079414, -0.03547737002372742, + -0.003607464488595724, -0.011886986903846264, 0.08876273781061172, + -0.035582806915044785, 0.01728527806699276, 0.033559445291757584, + 0.03006189689040184, -0.02276725322008133, -0.02573985420167446, + 0.018779633566737175, 0.018232546746730804, 0.04365724325180054, + 0.019853554666042328, -0.056149423122406006, -0.040290556848049164, + 0.055537160485982895, 0.01000349409878254, 0.042989231646060944, + 0.06469859182834625, -0.07268784195184708, 0.029832176864147186, + -0.00694637605920434, -0.019039496779441833, -0.028887171298265457, + 0.0611429437994957, -0.07661744952201843, 0.008372906595468521, + 0.07736742496490479, 0.01697900891304016, 0.00455197598785162, + -0.01660764403641224, 0.013384402729570866, 0.016262754797935486, + 0.04430804401636124, 0.05316875874996185, -0.017527703195810318, + -0.015531433746218681, 0.02427724562585354, 0.06978478282690048, + -0.0215159859508276, -0.0015113558620214462, -0.0005745171220041811, + -0.058096420019865036, 0.06534607708454132, 0.09807957708835602, + 0.03466407209634781, 0.02070295810699463, 0.06667006760835648, + 0.008454645052552223, 0.004790779203176498, -0.0021362053230404854, + 0.012438371777534485, -0.025263242423534393, 0.04503288492560387, + -0.014884737320244312, -0.009326054714620113, -0.03726727515459061, + -0.003038907190784812, 0.045574598014354706, 0.02176540531218052, + -0.022943837568163872, 0.03223365545272827, -0.0231216698884964, + -0.008260810747742653, -0.0838012844324112, -0.0041016764007508755, + -0.019858187064528465, -0.025245968252420425, -0.08231323212385178, + -0.0008271003025583923, 0.06744734942913055, -0.022615060210227966, + -0.036366865038871765, -0.007173497695475817, 0.023905375972390175, + -0.026212558150291443, 0.008525801822543144, 0.05790192261338234, + 0.03967795893549919, -0.019248196855187416, -0.03145810589194298, + -0.06143287941813469, -0.025088703259825706, 0.008470161817967892, + -0.019712727516889572, 0.0201752707362175, -0.024380167946219444, + -0.048689160495996475, 0.030698394402861595, 0.06519214808940887, + 0.006988975685089827, -0.030256111174821854, 0.015067224390804768, + -0.009249436669051647, 0.005610473453998566, 0.030035214498639107, + 0.0062820687890052795, -0.01272163912653923, 0.0031057705637067556, + -0.08999253064393997, 0.03721488639712334, -0.028286410495638847, + -0.041241131722927094, -0.011454920284450054, -0.024434737861156464, + 0.012364787049591541, 0.01152063999325037, 0.013034306466579437, + -0.015703829005360603, -0.07446613907814026, -0.035228293389081955, + 
0.029677776619791985, -0.0008637132123112679, 0.03882776200771332, + -0.03331062197685242, 0.06260097771883011, -0.03643746301531792, + 0.03024205192923546, -0.04027588292956352, -0.006302598863840103, + -0.03876963630318642, 0.022751253098249435, 0.05228758603334427, + -0.00650898227468133, 0.00784189160913229, -0.02789650298655033, + -0.03584962338209152, -0.03039231151342392, 0.005157549399882555, + 0.038540806621313095, 0.016989191994071007, 0.002693451475352049, + 0.03680325672030449, -0.02772810310125351, 0.04701118916273117, + 0.00428611459210515, 0.008383968845009804, 0.01370905339717865, + -0.03272991627454758, 0.025361433625221252, 0.07640355080366135, + -0.09241847693920135, -0.018185008317232132, -0.0033845037687569857, + 0.016245491802692413, -0.010160512290894985, -0.01563146524131298, + -0.05825433135032654, 0.01485800463706255, -0.012620965018868446, + 0.012889757752418518, -0.07174727320671082, 0.0036001859698444605, + 0.0003497107536531985, -0.010015580803155899, -0.026276255026459694, + -0.054649949073791504, 0.0178300142288208, -0.03058459609746933, + -0.0046766879968345165, 0.02828553318977356, 0.004914792720228434, + 0.006221550516784191, 0.02017192542552948, 0.06884320825338364, + -0.005684717558324337, -0.03233734890818596, 0.028631558641791344, + -0.037736676633358, 0.012628731317818165, 0.011251229792833328, + -0.01216588169336319, -0.0449829027056694, 0.05112060159444809, + 0.0321006216108799, 0.004842250142246485, -0.028930027037858963, + -0.03781994432210922, 0.027888713404536247, -0.0020639710128307343, + -0.012764016166329384, 0.001849374850280583, -0.06090773642063141, + 0.024058688431978226, -0.07892243564128876, -0.022651061415672302, + 0.03663799911737442, -0.015678945928812027, 0.034871794283390045, + 0.02377290651202202, 0.01880395971238613, 0.005291060544550419, + 0.06636939197778702, 0.07682432979345322, 0.06366447359323502, + 0.045753464102745056, -0.021396778523921967, -0.06412482261657715, + -0.022975077852606773, -0.015396900475025177, -0.01382171455770731, + 0.04801446571946144, -0.044695984572172165, 0.010728792287409306, + -0.013164067640900612, -0.06426115334033966, 0.016030244529247284, + 0.01712176389992237, 0.003342918585985899, -0.024401094764471054, + -0.04742956534028053, 0.05057492479681969, -0.06447580456733704, + 0.003736401442438364, -0.0038414313457906246, -0.044975876808166504, + 0.01563481241464615, -0.0513794831931591, 0.03116164542734623, + 0.05475221946835518, -0.018825948238372803, -0.05825172737240791, + 0.026944046840071678, -0.01748691312968731, -0.015295912511646748, + 0.041276704519987106, 0.06435948610305786, 0.057869717478752136, + 0.007085255812853575, -0.0016246287850663066, -0.049611661583185196, + -0.02173301763832569, -0.03379219397902489, -0.010483022779226303, + -0.04382925480604172, -0.019825980067253113, -0.028733380138874054, + 0.00799755472689867, -0.010613085702061653, 0.03142009675502777, + 0.0993402749300003, 0.013524055480957031, 0.04996125027537346, + -0.05237795040011406, 0.00459002610296011, 0.03747300058603287, + -0.007390610408037901, 0.03675704449415207, 0.028937028720974922, + 0.0490734800696373, 0.07018966227769852, 0.02625582180917263, + -0.012768297456204891, -0.017309138551354408, 0.009925411082804203, + 0.002874719677492976, -0.03868229687213898, -0.011665035039186478, + 0.016460483893752098, -0.016710275784134865, 0.026092402637004852, + -0.029528427869081497, 0.027799755334854126, 0.00869158748537302, + 0.00814155675470829, 0.025196386501193047, 0.010806374251842499, + 0.007144266739487648, 
0.009723981842398643, 0.040421262383461, + -0.014507091604173183, 0.00026819604681804776, 0.04148963838815689, + -0.011153719387948513, 0.011801899410784245, -0.04976220428943634, + 0.01822030358016491, -0.022088022902607918, 0.010638813488185406, + 0.050233982503414154, 0.03569936379790306, 0.024639304727315903, + 0.04990961775183678, 0.006694141309708357, 0.03368678316473961, + 0.05511782318353653, 0.006736745126545429, -0.03352257236838341, + 0.06660161167383194, 0.012697044759988785, -0.0414629764854908, + 0.00029493123292922974, -0.010526367463171482, + -0.015528022311627865, -0.024239003658294678, 0.014093346893787384, + -0.05118495970964432, -0.04236882925033569, 0.026792580261826515, + -0.04413650929927826, -0.008981052786111832, 0.02730277180671692, + 0.06500429660081863, 0.011842386797070503, -0.005152833182364702, + -0.011248798109591007, 0.0014251028187572956, -0.058844491839408875, + -0.03179366886615753, 0.014862962067127228, 0.0031557243783026934, + 0.006256013177335262, 0.022348832339048386, 0.018381774425506592, + 0.05182422697544098, -0.007802502252161503, 0.027966707944869995, + -0.015043427236378193, 0.06696246564388275, 0.06933777779340744, + 0.06229571998119354, 0.025468071922659874, 0.04187721014022827, + 0.030144240707159042, -0.04005308449268341, 0.018866809085011482, + -0.011857784353196621, 0.028774423524737358, 0.0008001378737390041, + 0.051755640655756, -0.0763072744011879, -0.035968881100416183, + -0.06542054563760757, -0.01490574050694704, 0.03574415668845177, + 0.008126146160066128, 0.02864741161465645, -3.158819890813902e-5, + 0.022258171811699867, 0.009770577773451805, -0.044916730374097824, + -0.07808850705623627, 0.0390549972653389, -0.01499694213271141, + 0.007959951646625996, -0.005930764134973288, -0.02100251242518425, + -0.029872389510273933, -0.012773459777235985, -0.043038900941610336, + 0.04595667123794556, -0.06141292303800583, 0.03035324439406395, + 0.00792359933257103, -0.020943107083439827, 0.02207357995212078, + 0.011123895645141602, 0.03734105825424194, -0.010610148310661316, + -0.052318185567855835, -0.015035799704492092, -0.025462502613663673, + 0.02562587894499302, -0.003998974338173866, 0.009129341691732407, + 0.07043135166168213, -0.004517677705734968, -0.08051703125238419, + 0.0014796281466260552, 0.05828672647476196, 0.02732127346098423, + -0.03499415144324303, -0.031034745275974274, -0.005702692084014416, + -0.040223129093647, -0.004799958318471909, 0.018715567886829376, + 0.04384651035070419, 0.013568849302828312, -0.011443322524428368, + 0.005295567214488983, 0.026522139087319374, 0.002658095210790634, + -0.03300406038761139, 0.018211429938673973, -0.013894042000174522, + 0.013989650644361973, 0.031047020107507706, -0.008311141282320023, + 0.01590598002076149, 0.060290511697530746, 0.01576240547001362, + 0.02187625877559185, 0.043800316751003265, 0.031320903450250626, + -0.06651055067777634, -0.021002093330025673, 0.0017026244895532727, + 0.043769653886556625, 0.10567088425159454, 0.05447397008538246, + 0.03038533218204975, -0.05251486599445343, 0.006278094835579395, + 0.036366380751132965, -0.04047612473368645, 0.028844239190220833, + 0.013570988550782204, 0.09016293287277222, -0.004787171259522438, + 0.01950586773455143, -0.05719347298145294, 0.061203483492136, + 0.01365590002387762, -0.014267620630562305, -0.024959318339824677, + -0.017525024712085724, 0.019045768305659294, 0.015455638989806175, + 0.018744226545095444, -0.012427862733602524, 0.02276451513171196, + 0.007119886577129364, 0.026162952184677124, 0.012351160869002342, 
+ -0.015322038903832436, 0.030794212594628334, 0.027505625039339066, + -0.08122427016496658, -0.056815020740032196, 0.028780920431017876, + -0.05804600194096565, 0.029232045635581017, 0.09229617565870285, + 0.012091382406651974, -0.0043792868964374065, -0.059065081179142, + -0.08049105107784271, 0.010014526546001434, -0.00943391490727663, + -0.021799413487315178, 0.04687872156500816, -0.02472817152738571, + -0.05067834630608559, -0.052253175526857376, 0.008715991862118244, + -0.016799505800008774, -0.04549355432391167, -0.042793650180101395, + -0.011589781381189823, 0.02598593384027481, 0.02300962619483471, + 0.08437345176935196, -0.005691266618669033, -0.02574770711362362, + 0.03002670407295227, 0.004122992046177387, -0.00882661622017622, + 0.01909799687564373, 0.01614842191338539, 0.03799080476164818, + -0.04371938109397888, -0.007932179607450962, 0.007009848486632109, + 0.016692185774445534, 0.0018943115137517452, 0.04915632680058479, + 0.0459667444229126, 0.0007698669796809554, -0.0378001406788826, + 0.06169285625219345, 0.006474153138697147, -0.02551594004034996, + 0.029698481783270836, 0.017282065004110336, 0.009382328949868679, + 0.01234542764723301, 0.0458146370947361, -0.05962980166077614, + 0.053857170045375824, 0.018979759886860847, -0.06830580532550812, + 0.047222405672073364, 0.004947094712406397, 0.006394018419086933, + 0.06749635934829712, -0.004976056516170502, -0.044039297848939896, + 0.005775265861302614, -0.021700462326407433, -0.04911866411566734, + 0.029284657910466194, 0.0057007912546396255, -0.05791183561086655, + -0.036271147429943085, 0.027215855196118355, 0.000328205554978922, + 0.011796397157013416, 0.014365887269377708, -0.021071504801511765, + -0.10476838052272797, 0.03931521624326706, -0.01743827760219574, + -0.06444519758224487, 0.06594052165746689, -0.013583669438958168, + 0.023327721282839775, -0.040980175137519836, 0.021110322326421738, + -0.023542361333966255, 0.009244954213500023, -0.027327876538038254, + -0.0749981552362442, -0.05817040055990219, 0.033675868064165115, + -0.006241938564926386, 0.015267791226506233, -0.005319511517882347, + 0.007725033443421125, -0.04769749939441681, -0.013632228597998619, + -0.01671481318771839, 0.032588712871074677, 0.004994310438632965, + 0.009327513165771961, -0.014502864331007004, -0.021513426676392555, + -0.041920777410268784, 0.000736730988137424, -0.04845338687300682, + 0.05143485218286514, -0.00827883929014206, -0.05451349914073944, + -0.018751125782728195, -0.028143448755145073, -0.027438942342996597, + 0.0018979490268975496, 0.0029770773835480213, -0.052255671471357346, + 0.008816778659820557, -0.04883852228522301, -0.005153963807970285, + 0.03402094915509224, -0.033734340220689774, 0.0250434298068285, + -0.020967962220311165, -0.006132633425295353, -0.07388681918382645, + -0.03740931302309036, 0.025875000283122063, 0.023692401126027107, + 0.00612619100138545, -0.022321289405226707, -0.035878535360097885, + -0.028265153989195824, 0.05367949604988098, 0.011247171089053154, + -0.020050087943673134, 0.07264093309640884, 0.06775902211666107, + -0.00279391766525805, 0.0012111783726140857, 3.3324729884043336e-5, + -0.008147984743118286, -0.023408658802509308, + -0.0009141457267105579, -0.0390138104557991, -0.004014919511973858, + -0.037028394639492035, 0.027383441105484962, 0.06490057706832886, + -4.5131921069696546e-5, -0.039642851799726486, + -0.028655976057052612, -0.01269892230629921, -0.020463932305574417, + 0.009898904711008072, -0.006316003855317831, -0.049614571034908295, + -0.0066841524094343185, 
-0.026518603786826134, 0.008593079634010792, + -0.003961550071835518, 0.03302085027098656, -0.022351538762450218, + -0.036156121641397476, -0.044611334800720215, 0.021284740418195724, + -0.043991588056087494, 0.003917429596185684, -0.03190663829445839, + -0.0045702336356043816, -0.018654555082321167, 0.011742550879716873, + 0.05492732673883438, -0.02513030730187893, -0.038381800055503845, + 0.00968407653272152, 0.01182209700345993, 0.04846900328993797, + -0.028035493567585945, 0.026404736563563347, 0.041231412440538406, + -0.026241928339004517, 0.07928590476512909, 0.061520665884017944, + -0.021520253270864487, 0.05087057501077652, 0.06552571803331375, + 0.030050503090023994, -0.002204337390139699, -0.023577604442834854, + -0.0306601133197546, -0.04351067543029785, -0.005328315310180187, + -0.01005119550973177, -0.02883738838136196, -0.017970634624361992, + -0.034061819314956665, -0.09152771532535553, -0.012920234352350235, + -0.054658979177474976, 0.006490966305136681, -0.005702045746147633, + 0.013751029968261719, -0.015532087534666061, -0.04882064089179039, + -0.06106610968708992, -0.056693095713853836, 0.01626153290271759, + 0.002411112654954195, 0.04952302202582359, -0.056874003261327744, + 0.008347931317985058, -0.0002814563049469143, 0.05288821458816528, + 0.03517848998308182, -0.04989904537796974, -0.04409780725836754, + 0.008727666921913624, -0.019816864281892776, -0.044541459530591965, + 0.050811536610126495, -0.016411324962973595, -0.01473113615065813, + -0.0033838164526969194, -0.03577446565032005, -0.07643205672502518, + 0.01180155761539936, -0.0024393019266426563, -0.056923456490039825, + 0.024946225807070732, -0.03063299134373665, 0.05805474892258644, + -0.026666324585676193, 0.03865901008248329, 0.010353310965001583, + 0.03435457870364189, -0.03234299272298813, 0.015178441070020199, + 0.008014033548533916, 0.03488391637802124, -0.02252863347530365, + 0.0029658775310963392, -0.08343874663114548, -0.04188064485788345, + 0.006045202724635601, 0.02174469269812107, -0.010302801616489887, + 0.00877392664551735, 0.02698654495179653, 0.011882752180099487, + -0.03585568815469742, 0.02313760109245777, 0.03410400450229645, + -0.02344358153641224, -0.06733264774084091, 0.013219987042248249, + -0.03404899314045906, -0.04966249316930771, -0.014130531810224056, + -0.037043944001197815, 0.030784595757722855, -0.051190052181482315, + -0.01868394762277603, -0.017007501795887947, -0.0715363398194313, + 0.03724869713187218, -0.02842004969716072, 0.040288373827934265, + -0.007283140439540148, 0.00501842750236392, -0.05653573200106621, + 0.012398875318467617, -0.00045440212124958634, -0.1044396162033081, + -0.040580108761787415, 0.019638027995824814, -0.038515642285346985, + -0.07116620987653732, -0.0394931435585022, 0.029516225680708885, + 0.0009498042636550963, 0.04369128495454788, 0.003504744963720441, + 0.02860836312174797, -0.015155916102230549, -0.021670715883374214, + 0.0023334589786827564, 0.044438689947128296, 0.023793401196599007, + 0.039728522300720215, 0.07858478277921677, 0.01348444726318121, + 0.030168594792485237, -0.01906987838447094, 0.04200403019785881, + -0.007454268634319305, 0.017389994114637375, -0.018342411145567894, + -0.006242978852242231, 0.0008583567687310278 + ], + "similarity_top_k": 1 + }, + "ignoreArrayOrder": true + } + ] + }, + "response": { + "status": 200, + "bodyFileName": "vector-search/queries/response_1.json", + "headers": { + "Content-Type": "application/json" + } + } +} diff --git a/llama-index-legacy/llama_index/legacy/llms/bedrock.py 
b/llama-index-legacy/llama_index/legacy/llms/bedrock.py index 30465907162a3..3160490fa7728 --- a/llama-index-legacy/llama_index/legacy/llms/bedrock.py +++ b/llama-index-legacy/llama_index/legacy/llms/bedrock.py @@ -109,7 +109,7 @@ def __init__( if context_size is None and model not in BEDROCK_FOUNDATION_LLMS: raise ValueError( "`context_size` argument not provided and" - "model provided refers to a non-foundation model." + " model provided refers to a non-foundation model." " Please specify the context_size" ) diff --git a/llama-index-packs/llama-index-packs-agents-llm-compiler/examples/llm_compiler.ipynb b/llama-index-packs/llama-index-packs-agents-llm-compiler/examples/llm_compiler.ipynb index 50baaabd1c233..8e2a8459bd263 --- a/llama-index-packs/llama-index-packs-agents-llm-compiler/examples/llm_compiler.ipynb +++ b/llama-index-packs/llama-index-packs-agents-llm-compiler/examples/llm_compiler.ipynb @@ -7,7 +7,7 @@ "source": [ "# LLM Compiler Agent Cookbook\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "**NOTE**: Full credits to the [source repo for LLMCompiler](https://github.com/SqueezeAILab/LLMCompiler). A lot of our implementation was lifted from this repo (and adapted with LlamaIndex modules).\n", "\n", diff --git a/llama-index-packs/llama-index-packs-infer-retrieve-rerank/examples/infer_retrieve_rerank.ipynb b/llama-index-packs/llama-index-packs-infer-retrieve-rerank/examples/infer_retrieve_rerank.ipynb index 05de7db0a95fc..32bbe34a181c5 --- a/llama-index-packs/llama-index-packs-infer-retrieve-rerank/examples/infer_retrieve_rerank.ipynb +++ b/llama-index-packs/llama-index-packs-infer-retrieve-rerank/examples/infer_retrieve_rerank.ipynb @@ -7,7 +7,7 @@ "source": [ "# Infer-Retrieve-Rerank Llama Pack\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This is our implementation of the paper [\"In-Context Learning for Extreme Multi-Label Classification\"](https://arxiv.org/pdf/2401.12178.pdf) by Oosterlinck et al.\n", "\n", diff --git a/llama-index-packs/llama-index-packs-llama-guard-moderator/examples/rag_moderator_llama_guard_pack.ipynb b/llama-index-packs/llama-index-packs-llama-guard-moderator/examples/rag_moderator_llama_guard_pack.ipynb index 27526e043a6d9..6ea3557f3c20f --- a/llama-index-packs/llama-index-packs-llama-guard-moderator/examples/rag_moderator_llama_guard_pack.ipynb +++ b/llama-index-packs/llama-index-packs-llama-guard-moderator/examples/rag_moderator_llama_guard_pack.ipynb @@ -6,7 +6,7 @@ "source": [ "# Safeguarding Your RAG Pipeline with LlamaGuardModeratorPack\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "\n", "This notebook shows how we can use LlamaGuardModeratorPack to safeguard the LLM inputs and outputs of a RAG pipeline using [Llama Guard](https://huggingface.co/meta-llama/LlamaGuard-7b).
The RAG pipeline uses the following models:\n", diff --git a/llama-index-packs/llama-index-packs-multi-tenancy-rag/examples/multi_tenancy_rag.ipynb b/llama-index-packs/llama-index-packs-multi-tenancy-rag/examples/multi_tenancy_rag.ipynb index a9b1f3e5e5032..300bbd6522f55 --- a/llama-index-packs/llama-index-packs-multi-tenancy-rag/examples/multi_tenancy_rag.ipynb +++ b/llama-index-packs/llama-index-packs-multi-tenancy-rag/examples/multi_tenancy_rag.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\"Open" + "\"Open" ] }, { diff --git a/llama-index-packs/llama-index-packs-neo4j-query-engine/examples/llama_packs_neo4j.ipynb b/llama-index-packs/llama-index-packs-neo4j-query-engine/examples/llama_packs_neo4j.ipynb index 447770d357d34..c99cec59bc823 --- a/llama-index-packs/llama-index-packs-neo4j-query-engine/examples/llama_packs_neo4j.ipynb +++ b/llama-index-packs/llama-index-packs-neo4j-query-engine/examples/llama_packs_neo4j.ipynb @@ -6,7 +6,7 @@ "source": [ "# Llama Pack - Neo4j Query Engine\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This Llama Pack creates a Neo4j knowledge graph query engine, and executes its `query` function. This pack offers the option of creating multiple types of query engines for Neo4j knowledge graphs, namely:\n", "\n", diff --git a/llama-index-packs/llama-index-packs-node-parser-semantic-chunking/examples/semantic_chunking.ipynb b/llama-index-packs/llama-index-packs-node-parser-semantic-chunking/examples/semantic_chunking.ipynb index 583bc71430f74..f24e7ff5b4b22 --- a/llama-index-packs/llama-index-packs-node-parser-semantic-chunking/examples/semantic_chunking.ipynb +++ b/llama-index-packs/llama-index-packs-node-parser-semantic-chunking/examples/semantic_chunking.ipynb @@ -7,7 +7,7 @@ "source": [ "# Semantic Chunker\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "\"Semantic chunking\" is a new concept proposed by Greg Kamradt in his video tutorial on 5 levels of embedding chunking: https://youtu.be/8OJC21T2SL4?t=1933.\n", "\n", diff --git a/llama-index-packs/llama-index-packs-rag-fusion-query-pipeline/examples/rag_fusion_pipeline.ipynb b/llama-index-packs/llama-index-packs-rag-fusion-query-pipeline/examples/rag_fusion_pipeline.ipynb index d16ae9e60663b..3715d16f32154 --- a/llama-index-packs/llama-index-packs-rag-fusion-query-pipeline/examples/rag_fusion_pipeline.ipynb +++ b/llama-index-packs/llama-index-packs-rag-fusion-query-pipeline/examples/rag_fusion_pipeline.ipynb @@ -7,7 +7,7 @@ "source": [ "# RAG Fusion Query Pipeline\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This notebook shows how to implement RAG Fusion using the LlamaIndex Query Pipeline syntax."
] diff --git a/llama-index-packs/llama-index-packs-self-rag/examples/self_rag.ipynb b/llama-index-packs/llama-index-packs-self-rag/examples/self_rag.ipynb index 5b576f10c2e48..bf539dfd30a90 --- a/llama-index-packs/llama-index-packs-self-rag/examples/self_rag.ipynb +++ b/llama-index-packs/llama-index-packs-self-rag/examples/self_rag.ipynb @@ -7,7 +7,7 @@ "source": [ "# Simple Self RAG Notebook\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "This LlamaPack implements a short form of the [self-RAG paper by Asai et al.](https://arxiv.org/pdf/2310.11511.pdf).\n", "\n", diff --git a/llama-index-packs/llama-index-packs-tables/examples/chain_of_table.ipynb b/llama-index-packs/llama-index-packs-tables/examples/chain_of_table.ipynb index c60006af85962..0612a2940d58d --- a/llama-index-packs/llama-index-packs-tables/examples/chain_of_table.ipynb +++ b/llama-index-packs/llama-index-packs-tables/examples/chain_of_table.ipynb @@ -7,7 +7,7 @@ "source": [ "# Chain of Table Notebook\n", "\n", - "\"Open\n", + "\"Open\n", "\n", "In this notebook we highlight our implementation of the [\"Chain of Table\" paper by Wang et al.](https://arxiv.org/pdf/2401.04398v1.pdf).\n", "\n", diff --git a/llama-index-packs/llama-index-packs-tables/examples/mix_self_consistency.ipynb b/llama-index-packs/llama-index-packs-tables/examples/mix_self_consistency.ipynb index edfd6fc6a3c69..f30bf22b1df91 --- a/llama-index-packs/llama-index-packs-tables/examples/mix_self_consistency.ipynb +++ b/llama-index-packs/llama-index-packs-tables/examples/mix_self_consistency.ipynb @@ -6,7 +6,7 @@ "source": [ "# Mix Self-Consistency Notebook \n", "\n", - "\n", + "\n", "\"Open\n", "\n", "In this notebook, we highlight the mix self-consistency method proposed in the [\"Rethinking Tabular Data Understanding with Large Language Models\"](https://arxiv.org/pdf/2312.16702v1.pdf) paper by Liu et al.\n", diff --git a/llama-index-utils/llama-index-utils-workflow/llama_index/utils/workflow/draw.py b/llama-index-utils/llama-index-utils-workflow/llama_index/utils/workflow/draw.py index c0305e1522786..d9313af484777 --- a/llama-index-utils/llama-index-utils-workflow/llama_index/utils/workflow/draw.py +++ b/llama-index-utils/llama-index-utils-workflow/llama_index/utils/workflow/draw.py @@ -1,4 +1,3 @@ -from pyvis.network import Network from typing import Optional from llama_index.core.workflow.events import StartEvent, StopEvent @@ -7,14 +6,17 @@ get_steps_from_class, get_steps_from_instance, ) +from llama_index.core.workflow.workflow import Workflow def draw_all_possible_flows( - workflow, + workflow: Workflow, filename: str = "workflow_all_flows.html", notebook: bool = False, ) -> None: """Draws all possible flows of the workflow.""" + from pyvis.network import Network + net = Network(directed=True, height="750px", width="100%") # Add the nodes + edge for stop events @@ -33,8 +35,9 @@ def draw_all_possible_flows( # If no steps are defined in the class, try to get them from the instance steps = get_steps_from_instance(workflow) - for step_name, step_func in get_steps_from_class(workflow).items(): - step_config: Optional[StepConfig] = getattr(step_func, "__step_config", None) + step_config: Optional[StepConfig] = None + for step_name, step_func in steps.items(): + step_config = getattr(step_func, "__step_config", None) if step_config is None: continue @@ -51,8 +54,8 @@ def draw_all_possible_flows( ) # Light green for events # Add edges from all steps - for step_name, step_func in get_steps_from_class(workflow).items(): - step_config:
Optional[StepConfig] = getattr(step_func, "__step_config", None) + for step_name, step_func in steps.items(): + step_config = getattr(step_func, "__step_config", None) if step_config is None: continue @@ -68,15 +71,21 @@ def draw_all_possible_flows( def draw_most_recent_execution( - workflow, + workflow: Workflow, filename: str = "workflow_recent_execution.html", notebook: bool = False, ) -> None: """Draws the most recent execution of the workflow.""" + from pyvis.network import Network + net = Network(directed=True, height="750px", width="100%") # Add nodes and edges based on execution history - for i, (step, event) in enumerate(workflow._accepted_events): + existing_context = next(iter(workflow._contexts), None) + if existing_context is None: + raise ValueError("No runs found in workflow") + + for i, (step, event) in enumerate(existing_context._accepted_events): event_node = f"{event}_{i}" step_node = f"{step}_{i}" net.add_node( @@ -88,7 +97,7 @@ def draw_most_recent_execution( net.add_edge(event_node, step_node) if i > 0: - prev_step_node = f"{workflow._accepted_events[i - 1][0]}_{i - 1}" + prev_step_node = f"{existing_context._accepted_events[i - 1][0]}_{i - 1}" net.add_edge(prev_step_node, event_node) net.show(filename, notebook=notebook) diff --git a/llama-index-utils/llama-index-utils-workflow/poetry.lock b/llama-index-utils/llama-index-utils-workflow/poetry.lock deleted file mode 100644 index de3d74d429c4e..0000000000000 --- a/llama-index-utils/llama-index-utils-workflow/poetry.lock +++ /dev/null @@ -1,4492 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - -[[package]] -name = "aiohappyeyeballs" -version = "2.4.0" -description = "Happy Eyeballs for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, - {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, -] - -[[package]] -name = "aiohttp" -version = "3.10.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, - {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, - {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, - {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, - {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, - {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, - {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, - {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, - {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, - {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, - {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, - {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, - {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, - {file = 
"aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, - {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, - {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, - {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, -] - -[package.dependencies] -aiohappyeyeballs = ">=2.3.0" -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[package.dependencies] 
-typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} - -[[package]] -name = "anyio" -version = "4.4.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - -[[package]] -name = "astroid" -version = "2.13.5" -description = "An abstract syntax tree for Python with inference support." 
-optional = false -python-versions = ">=3.7.2" -files = [ - {file = "astroid-2.13.5-py3-none-any.whl", hash = "sha256:6891f444625b6edb2ac798829b689e95297e100ddf89dbed5a8c610e34901501"}, - {file = "astroid-2.13.5.tar.gz", hash = "sha256:df164d5ac811b9f44105a72b8f9d5edfb7b5b2d7e979b04ea377a77b3229114a"}, -] - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = 
"sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "black" -version = "24.8.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, - {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, - {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, - {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, - {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, - {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, - {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, - {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, - {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, - {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, - {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, - {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, - {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, - {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, - {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, - {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, - {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, - {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, - {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, - {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, - {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, - {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, -] - -[package.dependencies] -click = ">=8.0.0" -ipython = {version = ">=7.8.0", optional = true, markers = "extra == \"jupyter\""} -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tokenize-rt = {version = ">=3.2.0", optional = true, markers = "extra == \"jupyter\""} -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.17.0" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash 
= "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "codespell" -version = "2.3.0" -description = "Codespell" -optional = false -python-versions = ">=3.8" -files = [ - {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, - {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""} - -[package.extras] -dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] -hard-encoding-detection = ["chardet"] -toml = ["tomli"] -types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "cryptography" -version = "43.0.0" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = 
"cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "dataclasses-json" -version = "0.6.7" -description = "Easily serialize dataclasses to and from JSON." 
-optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, - {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - -[[package]] -name = "debugpy" -version = "1.8.5" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = 
"debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "dirtyjson" -version = "1.0.8" -description = "JSON decoder for Python that can extract data from the muck" -optional = false -python-versions = "*" -files = [ - {file = "dirtyjson-1.0.8-py3-none-any.whl", hash = "sha256:125e27248435a58acace26d5c2c4c11a1c0de0a9c5124c5a94ba78e517d74f53"}, - {file = "dirtyjson-1.0.8.tar.gz", hash = "sha256:90ca4a18f3ff30ce849d100dcf4a003953c79d3a2348ef056f1d9c22231a25fd"}, -] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - 
-[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastjsonschema" -version = "2.20.0" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.15.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = 
"frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.6.1" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dev = ["pre-commit", "ruff"] -doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] -test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] -test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] -tqdm = ["tqdm"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = 
"greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = 
"greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "identify" -version = "2.6.0" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "8.2.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = 
"iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.5" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.10.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipython-8.10.0-py3-none-any.whl", hash = "sha256:b38c31e8fc7eff642fc7c597061fff462537cf2314e3225a19c906b7b0d8a345"}, - {file = "ipython-8.10.0.tar.gz", hash = "sha256:b13a1d6c1f5818bd388db53b7107d17454129a70de2b87481d555daede5eb49e"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" - -[package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] - -[[package]] -name = "ipywidgets" -version = "8.1.3" -description = "Jupyter interactive widgets" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ipywidgets-8.1.3-py3-none-any.whl", hash = "sha256:efafd18f7a142248f7cb0ba890a68b96abd4d6e88ddbda483c9130d12667eaf2"}, - {file = "ipywidgets-8.1.3.tar.gz", hash = "sha256:f5f9eeaae082b1823ce9eac2575272952f40d748893972956dc09700a6392d9c"}, -] - -[package.dependencies] -comm = 
">=0.1.3" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.11,<3.1.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.11,<4.1.0" - -[package.extras] -test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." -optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - -[[package]] -name = "jsonpickle" -version = "3.2.2" -description = "Python library for serializing arbitrary object graphs into JSON" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpickle-3.2.2-py3-none-any.whl", hash = "sha256:87cd82d237fd72c5a34970e7222dddc0accc13fddf49af84111887ed9a9445aa"}, - {file = "jsonpickle-3.2.2.tar.gz", hash = "sha256:d425fd2b8afe9f5d7d57205153403fbf897782204437882a477e8eed60930f8c"}, -] - -[package.extras] -docs = ["furo", "rst.linker (>=1.9)", "sphinx"] -packaging = ["build", "twine"] -testing = ["bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-benchmark", "pytest-benchmark[histogram]", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-ruff (>=0.2.1)", "scikit-learn", "scipy", "scipy (>=1.9.3)", "simplejson", "sqlalchemy", "ujson"] - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} -referencing = ">=0.28.4" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py = ">=0.7.1" -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a 
Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.31.0" - -[[package]] -name = "jupyter" -version = "1.0.0" -description = "Jupyter metapackage. Install all the Jupyter components in one go." -optional = false -python-versions = "*" -files = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, -] - -[package.dependencies] -ipykernel = "*" -ipywidgets = "*" -jupyter-console = "*" -nbconvert = "*" -notebook = "*" -qtconsole = "*" - -[[package]] -name = "jupyter-client" -version = "8.6.2" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-console" -version = "6.6.3" -description = "Jupyter terminal console" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, - {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, -] - -[package.dependencies] -ipykernel = ">=6.14" -ipython = "*" -jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -prompt-toolkit = ">=3.0.30" -pygments = "*" -pyzmq = ">=17" -traitlets = ">=5.4" - -[package.extras] -test = ["flaky", "pexpect", "pytest"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.10.0" -description = "Jupyter Event System library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, - {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} -python-json-logger = ">=2.0.4" -pyyaml = ">=5.3" -referencing = "*" -rfc3339-validator = "*" -rfc3986-validator = ">=0.1.1" -traitlets = ">=5.3" - -[package.extras] -cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] - -[[package]] -name = "jupyter-lsp" -version = "2.2.5" -description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, - {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-server = ">=1.1.2" - -[[package]] -name = "jupyter-server" -version = "2.14.2" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"}, - {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"}, -] - -[package.dependencies] -anyio = ">=3.1.0" -argon2-cffi = ">=21.1" -jinja2 = ">=3.0.3" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = ">=0.4.4" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -overrides = ">=5.0" -packaging = ">=22.0" -prometheus-client = ">=0.9" -pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = ">=1.8.2" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = ">=1.7" - -[package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.3" -description = "A Jupyter Server Extension Providing Terminals." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, - {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab" -version = "4.2.4" -description = "JupyterLab computational environment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab-4.2.4-py3-none-any.whl", hash = "sha256:807a7ec73637744f879e112060d4b9d9ebe028033b7a429b2d1f4fc523d00245"}, - {file = "jupyterlab-4.2.4.tar.gz", hash = "sha256:343a979fb9582fd08c8511823e320703281cd072a0049bcdafdc7afeda7f2537"}, -] - -[package.dependencies] -async-lru = ">=1.0.0" -httpx = ">=0.25.0" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -importlib-resources = {version = ">=1.4", markers = "python_version < \"3.9\""} -ipykernel = ">=6.5.0" -jinja2 = ">=3.0.3" -jupyter-core = "*" -jupyter-lsp = ">=2.0.0" -jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2" -packaging = "*" -setuptools = ">=40.1.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} -tornado = ">=6.2.0" -traitlets = "*" - -[package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] 
-docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] -test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] -upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "jupyterlab-server" -version = "2.27.3" -description = "A set of server components for JupyterLab and JupyterLab like applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, - {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, -] - -[package.dependencies] -babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0.3" -json5 = ">=0.9.0" -jsonschema = ">=4.18.0" -jupyter-server = ">=1.21,<3" -packaging = ">=21.3" -requests = ">=2.31" - -[package.extras] -docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] -openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.11" -description = "Jupyter interactive widgets for JupyterLab" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_widgets-3.0.11-py3-none-any.whl", hash = "sha256:78287fd86d20744ace330a61625024cf5521e1c012a352ddc0a3cdc2348becd0"}, - {file = "jupyterlab_widgets-3.0.11.tar.gz", hash = "sha256:dd5ac679593c969af29c9bed054c24f26842baa51352114736756bc035deee27"}, -] - -[[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = 
"lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, -] - -[[package]] -name = "llama-index-core" -version = "0.11.0" -description = "Interface between LLMs and your data" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_core-0.11.0-py3-none-any.whl", hash = "sha256:f1242d4aaf9ebe7b297ad28257429010b79944f54ac8c4938b06a882fff3fd1e"}, - {file = "llama_index_core-0.11.0.tar.gz", hash = "sha256:9cacca2f48d6054677fad16e6cc1e5b00226908a3282d16c717dd728a2894855"}, -] - -[package.dependencies] -aiohttp = ">=3.8.6,<4.0.0" -dataclasses-json = "*" -deprecated = ">=1.2.9.3" -dirtyjson = ">=1.0.8,<2.0.0" -fsspec = ">=2023.5.0" -httpx = "*" -nest-asyncio = ">=1.5.8,<2.0.0" -networkx = ">=3.0" -nltk = ">=3.8.1,<3.9 || >3.9" -numpy = "<2.0.0" -pillow = ">=9.0.0" -pydantic = ">=2.0.0,<3.0.0" -PyYAML = ">=6.0.1" -requests = ">=2.31.0" -SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]} -tenacity = ">=8.2.0,<8.4.0 || >8.4.0,<9.0.0" -tiktoken = ">=0.3.3" -tqdm = 
">=4.66.1,<5.0.0" -typing-extensions = ">=4.5.0" -typing-inspect = ">=0.8.0" -wrapt = "*" - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.22.0" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, - {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mistune" -version = "3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "mypy" -version = "0.991" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = 
"mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, -] - -[package.dependencies] -mypy-extensions = ">=0.4.3" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.4" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = 
["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "3.1" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.8" -files = [ - {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, - {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, -] - -[package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "nltk" -version = "3.9.1" -description = "Natural Language Toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, - {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, -] - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] -machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "notebook" -version = "7.2.1" -description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "notebook-7.2.1-py3-none-any.whl", hash = "sha256:f45489a3995746f2195a137e0773e2130960b51c9ac3ce257dbc2705aab3a6ca"}, - {file = "notebook-7.2.1.tar.gz", hash = "sha256:4287b6da59740b32173d01d641f763d292f49c30e7a51b89c46ba8473126341e"}, -] - -[package.dependencies] -jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.2.0,<4.3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2,<0.3" -tornado = ">=6.2.0" - -[package.extras] -dev = ["hatch", "pre-commit"] -docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = 
["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -description = "A shim layer for notebook traits and config" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, - {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, -] - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] - -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = 
"numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, -] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." -optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern 
matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - -[[package]] -name = "pillow" -version = "10.4.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = 
"pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = 
"sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "3.2.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pre_commit-3.2.0-py2.py3-none-any.whl", hash = "sha256:f712d3688102e13c8e66b7d7dbd8934a6dda157e58635d89f7d6fecdca39ce8a"}, - {file = "pre_commit-3.2.0.tar.gz", hash = "sha256:818f0d998059934d0f81bb3667e3ccdc32da6ed7ccaac33e43dc231561ddaaa9"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prometheus-client" -version = "0.20.0" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.47" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "6.0.0" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[package.extras] -tests = ["pytest"] - 
-[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.8.2" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.20.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = 
"sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = 
"pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pylint" -version = "2.15.10" -description = "python code static checker" -optional = false -python-versions = ">=3.7.2" -files = [ - {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"}, - {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"}, -] - -[package.dependencies] -astroid = ">=2.12.13,<=2.14.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, -] -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pytest" -version = "7.2.1" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, - {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.11.1" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, - {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, -] - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" -optional = false -python-versions = ">=3.6" -files = [ - {file = 
"python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pyvis" -version = "0.3.2" -description = "A Python network graph visualization library" -optional = false -python-versions = ">3.6" -files = [ - {file = "pyvis-0.3.2-py3-none-any.whl", hash = "sha256:5720c4ca8161dc5d9ab352015723abb7a8bb8fb443edeb07f7a322db34a97555"}, -] - -[package.dependencies] -ipython = ">=5.3.0" -jinja2 = ">=2.9.6" -jsonpickle = ">=1.4.1" -networkx = ">=1.11" - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pywinpty" -version = "2.0.13" -description = "Pseudo terminal support for Windows from Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, - {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, - {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, - {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, - {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, - {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = 
"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - 
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.1.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, - {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, - {file = 
"pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, - {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, - {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, - 
{file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4"}, - {file = "pyzmq-26.1.0-cp313-cp313-win32.whl", hash = "sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win32.whl", hash = "sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c"}, - {file = "pyzmq-26.1.0-cp38-cp38-win32.whl", hash = "sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741"}, - {file = "pyzmq-26.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe"}, - {file = 
"pyzmq-26.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8"}, - {file = "pyzmq-26.1.0-cp39-cp39-win32.whl", hash = "sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c"}, - {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qtconsole" -version = "5.5.2" -description = "Jupyter Qt console" -optional = false -python-versions = ">=3.8" -files = [ - {file = "qtconsole-5.5.2-py3-none-any.whl", hash = "sha256:42d745f3d05d36240244a04e1e1ec2a86d5d9b6edb16dbdef582ccb629e87e0b"}, - {file = "qtconsole-5.5.2.tar.gz", hash = "sha256:6b5fb11274b297463706af84dcbbd5c92273b1f619e6d25d08874b0a88516989"}, -] - -[package.dependencies] -ipykernel = ">=4.1" -jupyter-client = ">=4.1" -jupyter-core = "*" -packaging = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.4.0" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = ["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.4.1" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." -optional = false -python-versions = ">=3.7" -files = [ - {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, - {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2024.7.24" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, - {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, - {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, - {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, - {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, - {file = 
"regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, - {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, - {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, - {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, - {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, - {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, - {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rpds-py" -version = "0.20.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = 
"sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = 
"rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = 
"rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = 
"rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, -] - -[[package]] -name = "ruff" -version = "0.0.292" -description = "An extremely fast Python linter, written in Rust." -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, - {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, - {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, - {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, - {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, - {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, -] - -[[package]] -name = "send2trash" -version = "1.8.3" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, - {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - 
-[[package]] -name = "setuptools" -version = "72.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, -] - -[package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "soupsieve" -version = "2.5" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
-    {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
-]
-
-[[package]]
-name = "sqlalchemy"
-version = "2.0.32"
-description = "Database Abstraction Library"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"},
-    {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"},
-    {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"},
-    {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"},
-    {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"},
-    {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"},
-    {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"},
- {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, - {file = 
"SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, - {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, - {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "tenacity" -version = "8.5.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, - {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "terminado" -version = "0.18.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"},
-    {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"},
-]
-
-[package.dependencies]
-ptyprocess = {version = "*", markers = "os_name != \"nt\""}
-pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""}
-tornado = ">=6.1.0"
-
-[package.extras]
-docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
-test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"]
-typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"]
-
-[[package]]
-name = "tiktoken"
-version = "0.7.0"
-description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"},
-    {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"},
-    {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"},
-    {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"},
-    {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"},
-    {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"},
-    {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"},
-    {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"},
-    {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"},
-    {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"},
-    {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"},
-    {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"},
-    {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"},
-    {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"},
-    {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"},
-    {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"},
-    {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"},
-    {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"},
-    {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"},
-    {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"},
-    {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"},
-    {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"},
-    {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"},
-    {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"},
-    {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"},
-    {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"},
-    {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"},
-    {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"},
-    {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"},
-    {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"},
-    {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"},
-    {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"},
-    {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"},
-    {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"},
-    {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"},
-    {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"},
-]
-
-[package.dependencies]
-regex = ">=2022.1.18"
-requests = ">=2.26.0"
-
-[package.extras]
-blobfile = ["blobfile (>=2)"]
-
-[[package]]
-name = "tinycss2"
-version = "1.3.0"
-description = "A tiny CSS parser"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"},
-    {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"},
-]
-
-[package.dependencies]
-webencodings = ">=0.4"
-
-[package.extras]
-doc = ["sphinx", "sphinx_rtd_theme"]
-test = ["pytest", "ruff"]
-
-[[package]]
-name = "tokenize-rt"
-version = "6.0.0"
-description = "A wrapper around the stdlib `tokenize` which roundtrips."
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tokenize_rt-6.0.0-py2.py3-none-any.whl", hash = "sha256:d4ff7ded2873512938b4f8cbb98c9b07118f01d30ac585a30d7a88353ca36d22"}, - {file = "tokenize_rt-6.0.0.tar.gz", hash = "sha256:b9711bdfc51210211137499b5e355d3de5ec88a85d2025c520cbb921b5194367"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomlkit" -version = "0.13.0" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, - {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, -] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest 
(>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "tree-sitter" -version = "0.21.3" -description = "Python bindings for the Tree-Sitter parsing library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tree-sitter-0.21.3.tar.gz", hash = "sha256:b5de3028921522365aa864d95b3c41926e0ba6a85ee5bd000e10dc49b0766988"}, - {file = "tree_sitter-0.21.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:351f302b6615230c9dac9829f0ba20a94362cd658206ca9a7b2d58d73373dfb0"}, - {file = "tree_sitter-0.21.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:766e79ae1e61271e7fdfecf35b6401ad9b47fc07a0965ad78e7f97fddfdf47a6"}, - {file = "tree_sitter-0.21.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c4d3d4d4b44857e87de55302af7f2d051c912c466ef20e8f18158e64df3542a"}, - {file = "tree_sitter-0.21.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84eedb06615461b9e2847be7c47b9c5f2195d7d66d31b33c0a227eff4e0a0199"}, - {file = "tree_sitter-0.21.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d33ea425df8c3d6436926fe2991429d59c335431bf4e3c71e77c17eb508be5a"}, - {file = "tree_sitter-0.21.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fae1ee0ff6d85e2fd5cd8ceb9fe4af4012220ee1e4cbe813305a316caf7a6f63"}, - {file = "tree_sitter-0.21.3-cp310-cp310-win_amd64.whl", hash = "sha256:bb41be86a987391f9970571aebe005ccd10222f39c25efd15826583c761a37e5"}, - {file = "tree_sitter-0.21.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:54b22c3c2aab3e3639a4b255d9df8455da2921d050c4829b6a5663b057f10db5"}, - {file = "tree_sitter-0.21.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab6e88c1e2d5e84ff0f9e5cd83f21b8e5074ad292a2cf19df3ba31d94fbcecd4"}, - {file = "tree_sitter-0.21.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3fd34ed4cd5db445bc448361b5da46a2a781c648328dc5879d768f16a46771"}, - {file = "tree_sitter-0.21.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fabc7182f6083269ce3cfcad202fe01516aa80df64573b390af6cd853e8444a1"}, - {file = "tree_sitter-0.21.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f874c3f7d2a2faf5c91982dc7d88ff2a8f183a21fe475c29bee3009773b0558"}, - {file = "tree_sitter-0.21.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee61ee3b7a4eedf9d8f1635c68ba4a6fa8c46929601fc48a907c6cfef0cfbcb2"}, - {file = "tree_sitter-0.21.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b7256c723642de1c05fbb776b27742204a2382e337af22f4d9e279d77df7aa2"}, - {file = "tree_sitter-0.21.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:669b3e5a52cb1e37d60c7b16cc2221c76520445bb4f12dd17fd7220217f5abf3"}, - {file = "tree_sitter-0.21.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2aa2a5099a9f667730ff26d57533cc893d766667f4d8a9877e76a9e74f48f0d3"}, - 
{file = "tree_sitter-0.21.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3e06ae2a517cf6f1abb682974f76fa760298e6d5a3ecf2cf140c70f898adf0"}, - {file = "tree_sitter-0.21.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af992dfe08b4fefcfcdb40548d0d26d5d2e0a0f2d833487372f3728cd0772b48"}, - {file = "tree_sitter-0.21.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c7cbab1dd9765138505c4a55e2aa857575bac4f1f8a8b0457744a4fefa1288e6"}, - {file = "tree_sitter-0.21.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1e66aeb457d1529370fcb0997ae5584c6879e0e662f1b11b2f295ea57e22f54"}, - {file = "tree_sitter-0.21.3-cp312-cp312-win_amd64.whl", hash = "sha256:013c750252dc3bd0e069d82e9658de35ed50eecf31c6586d0de7f942546824c5"}, - {file = "tree_sitter-0.21.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4986a8cb4acebd168474ec2e5db440e59c7888819b3449a43ce8b17ed0331b07"}, - {file = "tree_sitter-0.21.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e217fee2e7be7dbce4496caa3d1c466977d7e81277b677f954d3c90e3272ec2"}, - {file = "tree_sitter-0.21.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32a88afff4f2bc0f20632b0a2aa35fa9ae7d518f083409eca253518e0950929"}, - {file = "tree_sitter-0.21.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3652ac9e47cdddf213c5d5d6854194469097e62f7181c0a9aa8435449a163a9"}, - {file = "tree_sitter-0.21.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:60b4df3298ff467bc01e2c0f6c2fb43aca088038202304bf8e41edd9fa348f45"}, - {file = "tree_sitter-0.21.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:00e4d0c99dff595398ef5e88a1b1ddd53adb13233fb677c1fd8e497fb2361629"}, - {file = "tree_sitter-0.21.3-cp38-cp38-win_amd64.whl", hash = "sha256:50c91353a26946e4dd6779837ecaf8aa123aafa2d3209f261ab5280daf0962f5"}, - {file = "tree_sitter-0.21.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b17b8648b296ccc21a88d72ca054b809ee82d4b14483e419474e7216240ea278"}, - {file = "tree_sitter-0.21.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f2f057fd01d3a95cbce6794c6e9f6db3d376cb3bb14e5b0528d77f0ec21d6478"}, - {file = "tree_sitter-0.21.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:839759de30230ffd60687edbb119b31521d5ac016749358e5285816798bb804a"}, - {file = "tree_sitter-0.21.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df40aa29cb7e323898194246df7a03b9676955a0ac1f6bce06bc4903a70b5f7"}, - {file = "tree_sitter-0.21.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1d9be27dde007b569fa78ff9af5fe40d2532c998add9997a9729e348bb78fa59"}, - {file = "tree_sitter-0.21.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c4ac87735e6f98fe085244c7c020f0177d13d4c117db72ba041faa980d25d69d"}, - {file = "tree_sitter-0.21.3-cp39-cp39-win_amd64.whl", hash = "sha256:fbbd137f7d9a5309fb4cb82e2c3250ba101b0dd08a8abdce815661e6cf2cbc19"}, -] - -[[package]] -name = "tree-sitter-languages" -version = "1.10.2" -description = "Binary Python wheels for all tree sitter languages." 
-optional = false -python-versions = "*" -files = [ - {file = "tree_sitter_languages-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5580348f0b20233b1d5431fa178ccd3d07423ca4a3275df02a44608fd72344b9"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:103c7466644486b1e9e03850df46fc6aa12f13ca636c74f173270276220ac80b"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d13db84511c6f1a7dc40383b66deafa74dabd8b877e3d65ab253f3719eccafd6"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57adfa32be7e465b54aa72f915f6c78a2b66b227df4f656b5d4fbd1ca7a92b3f"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6385e033e460ceb8f33f3f940335f422ef2b763700a04f0089391a68b56153"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dfa3f38cc5381c5aba01dd7494f59b8a9050e82ff6e06e1233e3a0cbae297e3c"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9f195155acf47f8bc5de7cee46ecd07b2f5697f007ba89435b51ef4c0b953ea5"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2de330e2ac6d7426ca025a3ec0f10d5640c3682c1d0c7702e812dcfb44b58120"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-win32.whl", hash = "sha256:c9731cf745f135d9770eeba9bb4e2ff4dabc107b5ae9b8211e919f6b9100ea6d"}, - {file = "tree_sitter_languages-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:6dd75851c41d0c3c4987a9b7692d90fa8848706c23115669d8224ffd6571e357"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7eb7d7542b2091c875fe52719209631fca36f8c10fa66970d2c576ae6a1b8289"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b41bcb00974b1c8a1800c7f1bb476a1d15a0463e760ee24872f2d53b08ee424"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f370cd7845c6c81df05680d5bd96db8a99d32b56f4728c5d05978911130a853"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1dc195c88ef4c72607e112a809a69190e096a2e5ebc6201548b3e05fdd169ad"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae34ac314a7170be24998a0f994c1ac80761d8d4bd126af27ee53a023d3b849"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:01b5742d5f5bd675489486b582bd482215880b26dde042c067f8265a6e925d9c"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ab1cbc46244d34fd16f21edaa20231b2a57f09f092a06ee3d469f3117e6eb954"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b1149e7467a4e92b8a70e6005fe762f880f493cf811fc003554b29f04f5e7c8"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-win32.whl", hash = "sha256:049276343962f4696390ee555acc2c1a65873270c66a6cbe5cb0bca83bcdf3c6"}, - {file = "tree_sitter_languages-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:7f3fdd468a577f04db3b63454d939e26e360229b53c80361920aa1ebf2cd7491"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c0f4c8b2734c45859edc7fcaaeaab97a074114111b5ba51ab4ec7ed52104763c"}, - {file = 
"tree_sitter_languages-1.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:eecd3c1244ac3425b7a82ba9125b4ddb45d953bbe61de114c0334fd89b7fe782"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15db3c8510bc39a80147ee7421bf4782c15c09581c1dc2237ea89cefbd95b846"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92c6487a6feea683154d3e06e6db68c30e0ae749a7ce4ce90b9e4e46b78c85c7"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2f1cd1d1bdd65332f9c2b67d49dcf148cf1ded752851d159ac3e5ee4f4d260"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:976c8039165b8e12f17a01ddee9f4e23ec6e352b165ad29b44d2bf04e2fbe77e"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:dafbbdf16bf668a580902e1620f4baa1913e79438abcce721a50647564c687b9"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1aeabd3d60d6d276b73cd8f3739d595b1299d123cc079a317f1a5b3c5461e2ca"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-win32.whl", hash = "sha256:fab8ee641914098e8933b87ea3d657bea4dd00723c1ee7038b847b12eeeef4f5"}, - {file = "tree_sitter_languages-1.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e606430d736367e5787fa5a7a0c5a1ec9b85eded0b3596bbc0d83532a40810b"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:838d5b48a7ed7a17658721952c77fda4570d2a069f933502653b17e15a9c39c9"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:987b3c71b1d278c2889e018ee77b8ee05c384e2e3334dec798f8b611c4ab2d1e"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faa00abcb2c819027df58472da055d22fa7dfcb77c77413d8500c32ebe24d38b"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e102fbbf02322d9201a86a814e79a9734ac80679fdb9682144479044f401a73"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0b87cf1a7b03174ba18dfd81582be82bfed26803aebfe222bd20e444aba003"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c0f1b9af9cb67f0b942b020da9fdd000aad5e92f2383ae0ba7a330b318d31912"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a4076c921f7a4d31e643843de7dfe040b65b63a238a5aa8d31d93aabe6572aa"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-win32.whl", hash = "sha256:fa6391a3a5d83d32db80815161237b67d70576f090ce5f38339206e917a6f8bd"}, - {file = "tree_sitter_languages-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:55649d3f254585a064121513627cf9788c1cfdadbc5f097f33d5ba750685a4c0"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6f85d1edaa2d22d80d4ea5b6d12b95cf3644017b6c227d0d42854439e02e8893"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d78feed4a764ef3141cb54bf00fe94d514d8b6e26e09423e23b4c616fcb7938c"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1aca27531f9dd5308637d76643372856f0f65d0d28677d1bcf4211e8ed1ad0"}, - {file = 
"tree_sitter_languages-1.10.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1031ea440dafb72237437d754eff8940153a3b051e3d18932ac25e75ce060a15"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99d3249beaef2c9fe558ecc9a97853c260433a849dcc68266d9770d196c2e102"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:59a4450f262a55148fb7e68681522f0c2a2f6b7d89666312a2b32708d8f416e1"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ce74eab0e430370d5e15a96b6c6205f93405c177a8b2e71e1526643b2fb9bab1"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9b4dd2b6b3d24c85dffe33d6c343448869eaf4f41c19ddba662eb5d65d8808f4"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-win32.whl", hash = "sha256:92d734fb968fe3927a7596d9f0459f81a8fa7b07e16569476b28e27d0d753348"}, - {file = "tree_sitter_languages-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:46a13f7d38f2eeb75f7cf127d1201346093748c270d686131f0cbc50e42870a1"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f8c6a936ae99fdd8857e91f86c11c2f5e507ff30631d141d98132bb7ab2c8638"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c283a61423f49cdfa7b5a5dfbb39221e3bd126fca33479cd80749d4d7a6b7349"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e60be6bdcff923386a54a5edcb6ff33fc38ab0118636a762024fa2bc98de55"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c00069f9575bd831eabcce2cdfab158dde1ed151e7e5614c2d985ff7d78a7de1"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475ff53203d8a43ccb19bb322fa2fb200d764001cc037793f1fadd714bb343da"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26fe7c9c412e4141dea87ea4b3592fd12e385465b5bdab106b0d5125754d4f60"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8fed27319957458340f24fe14daad467cd45021da034eef583519f83113a8c5e"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3657a491a7f96cc75a3568ddd062d25f3be82b6a942c68801a7b226ff7130181"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-win32.whl", hash = "sha256:33f7d584d01a7a3c893072f34cfc64ec031f3cfe57eebc32da2f8ac046e101a7"}, - {file = "tree_sitter_languages-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:1b944af3ee729fa70fc8ae82224a9ff597cdb63addea084e0ea2fa2b0ec39bb7"}, -] - -[package.dependencies] -tree-sitter = "*" - -[[package]] -name = "types-cffi" -version = "1.16.0.20240331" -description = "Typing stubs for cffi" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, - {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, -] - -[package.dependencies] -types-setuptools = "*" - -[[package]] -name = "types-deprecated" -version = "1.2.9.20240311" -description = "Typing stubs for Deprecated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-Deprecated-1.2.9.20240311.tar.gz", hash = 
"sha256:0680e89989a8142707de8103f15d182445a533c1047fd9b7e8c5459101e9b90a"}, - {file = "types_Deprecated-1.2.9.20240311-py3-none-any.whl", hash = "sha256:d7793aaf32ff8f7e49a8ac781de4872248e0694c4b75a7a8a186c51167463f9d"}, -] - -[[package]] -name = "types-docutils" -version = "0.21.0.20240724" -description = "Typing stubs for docutils" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-docutils-0.21.0.20240724.tar.gz", hash = "sha256:29ff7e27660f4fe76ea61d7e54d05ca3ce3b733ca9e8e8721e0fa587dbc10489"}, - {file = "types_docutils-0.21.0.20240724-py3-none-any.whl", hash = "sha256:bf51c6c488d23c0412f9b3ba10686fb1a6cb0b957ef04b45128d8a55c79ebb00"}, -] - -[[package]] -name = "types-protobuf" -version = "4.25.0.20240417" -description = "Typing stubs for protobuf" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-protobuf-4.25.0.20240417.tar.gz", hash = "sha256:c34eff17b9b3a0adb6830622f0f302484e4c089f533a46e3f147568313544352"}, - {file = "types_protobuf-4.25.0.20240417-py3-none-any.whl", hash = "sha256:e9b613227c2127e3d4881d75d93c93b4d6fd97b5f6a099a0b654a05351c8685d"}, -] - -[[package]] -name = "types-pyopenssl" -version = "24.1.0.20240722" -description = "Typing stubs for pyOpenSSL" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, - {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-cffi = "*" - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20240316" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240808" -description = "Typing stubs for PyYAML" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, -] - -[[package]] -name = "types-redis" -version = "4.5.5.0" -description = "Typing stubs for redis" -optional = false -python-versions = "*" -files = [ - {file = "types-redis-4.5.5.0.tar.gz", hash = "sha256:26547d91f011a4024375d9216cd4d917b4678c984201d46f72c604526c138523"}, - {file = "types_redis-4.5.5.0-py3-none-any.whl", hash = "sha256:c7132e0cedeb52a83d20138c0440721bfae89cd2027c1ef57a294b56dfde4ee8"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - -[[package]] -name = "types-requests" -version = "2.28.11.8" -description = "Typing stubs for requests" -optional = false -python-versions = "*" -files = [ - {file = "types-requests-2.28.11.8.tar.gz", hash = "sha256:e67424525f84adfbeab7268a159d3c633862dafae15c5b19547ce1b55954f0a3"}, - {file = "types_requests-2.28.11.8-py3-none-any.whl", hash = "sha256:61960554baca0008ae7e2db2bd3b322ca9a144d3e80ce270f5fb640817e40994"}, -] - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = 
"types-setuptools" -version = "67.1.0.0" -description = "Typing stubs for setuptools" -optional = false -python-versions = "*" -files = [ - {file = "types-setuptools-67.1.0.0.tar.gz", hash = "sha256:162a39d22e3a5eb802197c84f16b19e798101bbd33d9437837fbb45627da5627"}, - {file = "types_setuptools-67.1.0.0-py3-none-any.whl", hash = "sha256:5bd7a10d93e468bfcb10d24cb8ea5e12ac4f4ac91267293959001f1448cf0619"}, -] - -[package.dependencies] -types-docutils = "*" - -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" -files = [ - {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, - {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." -optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "virtualenv" -version = "20.26.3" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "webcolors" -version = "24.8.0" -description = "A library for working with the color formats defined by HTML and CSS." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, - {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["coverage[toml]"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.11" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ - {file = "widgetsnbextension-4.0.11-py3-none-any.whl", hash = "sha256:55d4d6949d100e0d08b94948a42efc3ed6dfdc0e9468b2c4b128c9a2ce3a7a36"}, - {file = "widgetsnbextension-4.0.11.tar.gz", hash = "sha256:8b22a8f1910bfd188e596fe7fc05dcbd87e810c8a4ba010bdb3da86637398474"}, -] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = 
"yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", 
hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zipp" -version = "3.19.2" -description = "Backport of pathlib-compatible object 
wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.8.1,<4.0" -content-hash = "b8531d7c1d8a1f6b6b06be0fc5d0b7e1be8cba95f1a21e3fa4e207e5bb3eace1" diff --git a/llama-index-utils/llama-index-utils-workflow/pyproject.toml b/llama-index-utils/llama-index-utils-workflow/pyproject.toml index 2bd5703dd8138..3efbfa4bf1ccd 100644 --- a/llama-index-utils/llama-index-utils-workflow/pyproject.toml +++ b/llama-index-utils/llama-index-utils-workflow/pyproject.toml @@ -24,7 +24,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-utils-workflow" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/pants.toml b/pants.toml index fd2d071fcf9ef..12c7194790441 100644 --- a/pants.toml +++ b/pants.toml @@ -19,7 +19,7 @@ unmatched_build_file_globs = "error" config = "./pyproject.toml" [python] -interpreter_constraints = ["==3.10.*", "==3.8.*", "==3.9.*"] +interpreter_constraints = ["==3.10.*", "==3.11.*", "==3.12.*", "==3.9.*"] [python-infer] ambiguity_resolution = "by_source_root" diff --git a/poetry.lock b/poetry.lock index da26d4df56663..47b427ca90793 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -383,89 +383,89 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -645,38 +645,38 @@ test = ["pytest"] [[package]] name = "cryptography" -version = "43.0.0" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -689,7 +689,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -841,13 +841,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] @@ -869,19 +869,19 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "frozenlist" @@ -971,13 +971,13 @@ files = [ [[package]] name = "fsspec" -version = "2024.6.1" +version = "2024.9.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, + {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, + {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, ] [package.extras] @@ -1027,69 +1027,77 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.0" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = 
"greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, + {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, + {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, + {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, + {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, + {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, + {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, + {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, + {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, + {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, + {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, + {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, + {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, + {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, + {file = 
"greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, + {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, + {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, + {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, ] [package.extras] @@ -1098,13 +1106,13 @@ test = ["objgraph", "psutil"] [[package]] name = "griffe" -version = "1.2.0" +version = "1.3.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-1.2.0-py3-none-any.whl", hash = "sha256:a8b2fcb1ecdc5a412e646b0b4375eb20a5d2eac3a11dd8c10c56967a4097663c"}, - {file = "griffe-1.2.0.tar.gz", hash = "sha256:1c9f6ef7455930f3f9b0c4145a961c90385d1e2cbc496f7796fbff560ec60d31"}, + {file = "griffe-1.3.1-py3-none-any.whl", hash = "sha256:940aeb630bc3054b4369567f150b6365be6f11eef46b0ed8623aea96e6d17b19"}, + {file = "griffe-1.3.1.tar.gz", hash = "sha256:3f86a716b631a4c0f96a43cb75d05d3c85975003c20540426c0eba3b0581c56a"}, ] [package.dependencies] @@ -1145,13 +1153,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1166,6 +1174,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "identify" @@ -1194,32 +1203,36 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.4.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, - {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "importlib-resources" -version = "6.4.4" +version = "6.4.5" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.4-py3-none-any.whl", hash = "sha256:dda242603d1c9cd836c3368b1174ed74cb4049ecd209e7a1a0104620c18c5c11"}, - {file = "importlib_resources-6.4.4.tar.gz", hash = "sha256:20600c8b7361938dc0bb2d5ec0297802e575df486f5a544fa414da65e13721f7"}, + {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, + {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, ] [package.dependencies] @@ -1616,13 +1629,13 @@ files = [ [[package]] name = "llama-cloud" -version = "0.0.15" +version = "0.0.17" description = "" optional = false python-versions = "<4,>=3.8" files = [ - {file = "llama_cloud-0.0.15-py3-none-any.whl", hash = "sha256:52f18a3870e23c4a9b5f66827a58dc87d5a1c3034d1ce6ab513ca7eb09ae8b36"}, - {file = "llama_cloud-0.0.15.tar.gz", hash = "sha256:be06fd888e889623796b9c2aa0fc0d09ef039ed5145ff267d8408ccbea70c048"}, + {file = "llama_cloud-0.0.17-py3-none-any.whl", hash = "sha256:da898dcc98de84f29886f979b1ccae1e96d9f73d1b0e07146a51d315b161e45c"}, + {file = "llama_cloud-0.0.17.tar.gz", hash = "sha256:7fd6857bbbb91937535572ccb48daa38189f55cdd7411185d8083dab29ba1299"}, ] [package.dependencies] @@ -1631,13 +1644,13 @@ pydantic = ">=1.10" [[package]] name = "llama-index-agent-openai" -version = "0.3.0" +version = "0.3.1" description = "llama-index agent openai integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_agent_openai-0.3.0-py3-none-any.whl", hash = "sha256:2b7d0e3d0e95271e5244e75a0366248c48d733497d93ae5bb09f548afe24ec98"}, - {file = "llama_index_agent_openai-0.3.0.tar.gz", hash = "sha256:dade70e8b987194d7afb6925f723060e9f4953eb134400da2fcd4ceedf2c3dff"}, + {file = "llama_index_agent_openai-0.3.1-py3-none-any.whl", hash = "sha256:21886081ae74574d8cad1ec4886f2d4b082284b9364adb46fff87aba1a25e9b9"}, + {file = "llama_index_agent_openai-0.3.1.tar.gz", hash = "sha256:41c9aa5b7cebc5043adddb3442e3167e97971589466e36448e04e0a767f5b9aa"}, ] [package.dependencies] @@ -1647,13 +1660,13 @@ openai = ">=1.14.0" [[package]] name = "llama-index-cli" -version = "0.3.0" +version = "0.3.1" description = "llama-index cli" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_cli-0.3.0-py3-none-any.whl", hash = "sha256:23227f305b7b320c7909f54ef2eeba90b9ad1a56231fbfbe1298280542bb9f24"}, - {file = "llama_index_cli-0.3.0.tar.gz", hash = "sha256:a42e01fe2a02aa0fd3b645eb1403f9058fa7f62fbeea2a06a55b7fb8c07d5d02"}, + {file = "llama_index_cli-0.3.1-py3-none-any.whl", hash = "sha256:2111fbb6973f5b1eabce0d6cca3986499f0f2f625b13d7f48269a49c64c027d4"}, + {file = "llama_index_cli-0.3.1.tar.gz", hash = "sha256:1890dd687cf440f3651365a549e303363162c167b8efbd87a3aa10058d6d5c77"}, ] [package.dependencies] @@ 
-1663,13 +1676,13 @@ llama-index-llms-openai = ">=0.2.0,<0.3.0" [[package]] name = "llama-index-core" -version = "0.11.1" +version = "0.11.9" description = "Interface between LLMs and your data" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_core-0.11.1-py3-none-any.whl", hash = "sha256:7d86c72be272c13f2c8fd5c62f27aba08a38e23cc5d28e02509d69909859102d"}, - {file = "llama_index_core-0.11.1.tar.gz", hash = "sha256:5c8b5cb1313866ff38ce91bea53a72519a52c9dabc0ca9d5c8b63aa8d3b2dd23"}, + {file = "llama_index_core-0.11.9-py3-none-any.whl", hash = "sha256:cd89742692d9662f4ad503724409ce9df69e2124fd89e580119f8716a42aad6c"}, + {file = "llama_index_core-0.11.9.tar.gz", hash = "sha256:aa11141161a0f40d22dae2d5fe08a3ead5ad209e0e1daf54f75643c816f3fd2d"}, ] [package.dependencies] @@ -1684,7 +1697,7 @@ networkx = ">=3.0" nltk = ">3.8.1" numpy = "<2.0.0" pillow = ">=9.0.0" -pydantic = ">=2.0.0,<3.0.0" +pydantic = ">=2.7.0,<3.0.0" PyYAML = ">=6.0.1" requests = ">=2.31.0" SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]} @@ -1697,13 +1710,13 @@ wrapt = "*" [[package]] name = "llama-index-embeddings-openai" -version = "0.2.3" +version = "0.2.4" description = "llama-index embeddings openai integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_embeddings_openai-0.2.3-py3-none-any.whl", hash = "sha256:be7d2aad0884e54d291af786b23d2feb7770cd1c3950f0de1fd5e36c60d83c06"}, - {file = "llama_index_embeddings_openai-0.2.3.tar.gz", hash = "sha256:2f7adef6b61fd4f1bea487166ff9a5ff063227686b7dbb5d2227e46450a7ec4c"}, + {file = "llama_index_embeddings_openai-0.2.4-py3-none-any.whl", hash = "sha256:2ddf8e6a911c1c058b6bcc94299806a987f837dfa7a3c4a1d7df62dac99c2c7b"}, + {file = "llama_index_embeddings_openai-0.2.4.tar.gz", hash = "sha256:09e25ffb946dd1f95df15017236de45781a838dce65498559d0753c72effe617"}, ] [package.dependencies] @@ -1766,17 +1779,18 @@ query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "l [[package]] name = "llama-index-llms-openai" -version = "0.2.0" +version = "0.2.5" description = "llama-index llms openai integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_llms_openai-0.2.0-py3-none-any.whl", hash = "sha256:70c5d97b9b03fbb689e45b434fb71a7ff047bc7c38241e09be977bad64f61aba"}, - {file = "llama_index_llms_openai-0.2.0.tar.gz", hash = "sha256:13c85d4cf12bd07b9eab9805cbc42dfb2e35d0dfc9dc26720edd1bdf1c112a54"}, + {file = "llama_index_llms_openai-0.2.5-py3-none-any.whl", hash = "sha256:27bdaf045dfd88cf807fdc16214db963b7b5bbf762f7acc0d5c2827a87608816"}, + {file = "llama_index_llms_openai-0.2.5.tar.gz", hash = "sha256:042b52427d5d5dc178e6e30dd90c24eeb1618f65ff73010eaa96fca168c831ef"}, ] [package.dependencies] -llama-index-core = ">=0.11.0,<0.12.0" +llama-index-agent-openai = ">=0.3.1,<0.4.0" +llama-index-core = ">=0.11.7,<0.12.0" openai = ">=1.40.0,<2.0.0" [[package]] @@ -1828,13 +1842,13 @@ llama-index-program-openai = ">=0.2.0,<0.3.0" [[package]] name = "llama-index-readers-file" -version = "0.2.0" +version = "0.2.1" description = "llama-index readers file integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_readers_file-0.2.0-py3-none-any.whl", hash = "sha256:d9e88eacb313fbc2325445760feab611c6ae1a95ec61f4c3aec11908ccb31536"}, - {file = "llama_index_readers_file-0.2.0.tar.gz", hash = "sha256:55db7c31666bab2b2dd2f762d622f2dc8e73933943c92f8838868a901e505708"}, + {file = "llama_index_readers_file-0.2.1-py3-none-any.whl", hash = 
"sha256:0ba9dfe55cae939f7e33902400113a0c5530542de6b70f3b56b0d6f9ec8c5740"}, + {file = "llama_index_readers_file-0.2.1.tar.gz", hash = "sha256:c7f92074849fc59b10049d496a4ae52669abfcb159a199d9a113852a2fed70b8"}, ] [package.dependencies] @@ -1849,28 +1863,28 @@ pymupdf = ["pymupdf (>=1.23.21,<2.0.0)"] [[package]] name = "llama-index-readers-llama-parse" -version = "0.2.0" +version = "0.3.0" description = "llama-index readers llama-parse integration" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_readers_llama_parse-0.2.0-py3-none-any.whl", hash = "sha256:c0cb103fac8cd0a6de62a1b71a56884bef99a2d55c3afcabb073f078e727494f"}, - {file = "llama_index_readers_llama_parse-0.2.0.tar.gz", hash = "sha256:c54e8a207d73efb9f011636a30a4c1076b43d77a34d2563d374dc67c0cddfc83"}, + {file = "llama_index_readers_llama_parse-0.3.0-py3-none-any.whl", hash = "sha256:1973cc710dbd5e110c7500c9983ecb45787ad1ff92e6b2113f94a57cf48f3038"}, + {file = "llama_index_readers_llama_parse-0.3.0.tar.gz", hash = "sha256:a5feada0895714dcc41d65dd512c1c38cf70d8ae19947cff82b80d58e6aa367e"}, ] [package.dependencies] llama-index-core = ">=0.11.0,<0.12.0" -llama-parse = ">=0.4.0" +llama-parse = ">=0.5.0" [[package]] name = "llama-parse" -version = "0.5.0" +version = "0.5.5" description = "Parse files into RAG-Optimized formats." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_parse-0.5.0-py3-none-any.whl", hash = "sha256:8e1f0191070e7ab3ff18e2026eb76ed93ea016a2305da2762d9741b8a9b33fc7"}, - {file = "llama_parse-0.5.0.tar.gz", hash = "sha256:e3f943aa68b84df6e9d309682604eb749b2de0dd09c8310d9549b94bfcebb478"}, + {file = "llama_parse-0.5.5-py3-none-any.whl", hash = "sha256:03413620ede2130fb4bc3bad6021006f75dffa05137c358ae1ac60a60b143124"}, + {file = "llama_parse-0.5.5.tar.gz", hash = "sha256:20b00e09b14d1b57a1b040e4bd61436d51f43736b7b51003072a4d05b035ef44"}, ] [package.dependencies] @@ -2033,13 +2047,13 @@ files = [ [[package]] name = "mdit-py-plugins" -version = "0.4.1" +version = "0.4.2" description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" files = [ - {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, - {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, + {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, + {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, ] [package.dependencies] @@ -2085,13 +2099,13 @@ files = [ [[package]] name = "mkdocs" -version = "1.6.0" +version = "1.6.1" description = "Project documentation with Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs-1.6.0-py3-none-any.whl", hash = "sha256:1eb5cb7676b7d89323e62b56235010216319217d4af5ddc543a91beb8d125ea7"}, - {file = "mkdocs-1.6.0.tar.gz", hash = "sha256:a73f735824ef83a4f3bcb7a231dcab23f5a838f88b7efc54a0eef5fbdbc3c512"}, + {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, + {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, ] [package.dependencies] @@ -2116,13 +2130,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp [[package]] name = "mkdocs-autorefs" -version = "1.1.0" +version = "1.2.0" description = "Automatically link across pages in MkDocs." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_autorefs-1.1.0-py3-none-any.whl", hash = "sha256:492ac42f50214e81565e968f8cb0df9aba9d981542b9e7121b8f8ae9407fe6eb"}, - {file = "mkdocs_autorefs-1.1.0.tar.gz", hash = "sha256:f2fd43b11f66284bd014f9b542a05c8ecbfaad4e0d7b30b68584788217b6c656"}, + {file = "mkdocs_autorefs-1.2.0-py3-none-any.whl", hash = "sha256:d588754ae89bd0ced0c70c06f58566a4ee43471eeeee5202427da7de9ef85a2f"}, + {file = "mkdocs_autorefs-1.2.0.tar.gz", hash = "sha256:a86b93abff653521bda71cf3fc5596342b7a23982093915cb74273f67522190f"}, ] [package.dependencies] @@ -2182,13 +2196,13 @@ pygments = ">2.12.0" [[package]] name = "mkdocs-material" -version = "9.5.33" +version = "9.5.34" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.33-py3-none-any.whl", hash = "sha256:dbc79cf0fdc6e2c366aa987de8b0c9d4e2bb9f156e7466786ba2fd0f9bf7ffca"}, - {file = "mkdocs_material-9.5.33.tar.gz", hash = "sha256:d23a8b5e3243c9b2f29cdfe83051104a8024b767312dc8fde05ebe91ad55d89d"}, + {file = "mkdocs_material-9.5.34-py3-none-any.whl", hash = "sha256:54caa8be708de2b75167fd4d3b9f3d949579294f49cb242515d4653dbee9227e"}, + {file = "mkdocs_material-9.5.34.tar.gz", hash = "sha256:1e60ddf716cfb5679dfd65900b8a25d277064ed82d9a53cd5190e3f894df7840"}, ] [package.dependencies] @@ -2228,7 +2242,6 @@ optional = false python-versions = ">=3.6" files = [ {file = "mkdocs-redirects-1.2.1.tar.gz", hash = "sha256:9420066d70e2a6bb357adf86e67023dcdca1857f97f07c7fe450f8f1fb42f861"}, - {file = "mkdocs_redirects-1.2.1-py3-none-any.whl", hash = "sha256:497089f9e0219e7389304cffefccdfa1cac5ff9509f2cb706f4c9b221726dffb"}, ] [package.dependencies] @@ -2270,118 +2283,124 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "1.10.8" +version = "1.10.9" description = "A Python handler for mkdocstrings." 
optional = false python-versions = ">=3.8" files = [ - {file = "mkdocstrings_python-1.10.8-py3-none-any.whl", hash = "sha256:bb12e76c8b071686617f824029cb1dfe0e9afe89f27fb3ad9a27f95f054dcd89"}, - {file = "mkdocstrings_python-1.10.8.tar.gz", hash = "sha256:5856a59cbebbb8deb133224a540de1ff60bded25e54d8beacc375bb133d39016"}, + {file = "mkdocstrings_python-1.10.9-py3-none-any.whl", hash = "sha256:cbe98710a6757dfd4dff79bf36cb9731908fb4c69dd2736b15270ae7a488243d"}, + {file = "mkdocstrings_python-1.10.9.tar.gz", hash = "sha256:f344aaa47e727d8a2dc911e063025e58e2b7fb31a41110ccc3902aa6be7ca196"}, ] [package.dependencies] griffe = ">=0.49" +mkdocs-autorefs = ">=1.0" mkdocstrings = ">=0.25" [[package]] name = "multidict" -version = "6.0.5" +version = "6.1.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = 
"multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = 
"multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = 
"multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = 
"multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = 
"multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = 
"multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "mypy" version = "0.991" @@ -2628,13 +2647,13 @@ files = [ [[package]] name = "openai" -version = "1.42.0" +version = "1.45.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.42.0-py3-none-any.whl", hash = "sha256:dc91e0307033a4f94931e5d03cc3b29b9717014ad5e73f9f2051b6cb5eda4d80"}, - {file = "openai-1.42.0.tar.gz", hash = "sha256:c9d31853b4e0bc2dc8bd08003b462a006035655a701471695d0bfdc08529cde3"}, + {file = "openai-1.45.0-py3-none-any.whl", hash = "sha256:2f1f7b7cf90f038a9f1c24f0d26c0f1790c102ec5acd07ffd70a9b7feac1ff4e"}, + {file = "openai-1.45.0.tar.gz", hash = "sha256:731207d10637335413aa3c0955f8f8df30d7636a4a0f9c381f2209d32cf8de97"}, ] [package.dependencies] @@ -2713,7 +2732,7 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" @@ -2915,19 +2934,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, + {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -3043,18 +3062,18 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" typing-extensions = [ {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, @@ -3062,103 +3081,104 @@ typing-extensions = [ [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = 
"pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = 
"pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = 
"pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = 
"pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [package.dependencies] @@ -3248,13 +3268,13 @@ image = ["Pillow (>=8.0.0)"] [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = 
"pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -3319,13 +3339,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -3565,90 +3585,105 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2024.7.24" +version = "2024.9.11" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, - {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, - {file = 
"regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, - {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, - {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, - {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, - {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, - 
{file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, - {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, - {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, - {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, - {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, + {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, + {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, + {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, + {file = 
"regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, + {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, + {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, + {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, + {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, + {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, + {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, + {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, + {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, + {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, + {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, + {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, + {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, + {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, + {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, + {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, + {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, + {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, + {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, + {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, + {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, + {file = 
"regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, + {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, + {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, + {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, + {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, + {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, + {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, + {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, + {file = "regex-2024.9.11.tar.gz", 
hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, ] [[package]] @@ -3845,64 +3880,20 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.32" +version = "2.0.34" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, - {file 
= "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash 
= "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, - {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, - {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, + {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") or extra == \"asyncio\""} typing-extensions = ">=4.6.0" [package.extras] @@ -4160,13 +4151,13 @@ files = [ [[package]] name = "types-docutils" -version = "0.21.0.20240724" +version = "0.21.0.20240907" description = "Typing stubs for docutils" optional = false python-versions = ">=3.8" files = [ - {file = "types-docutils-0.21.0.20240724.tar.gz", hash = "sha256:29ff7e27660f4fe76ea61d7e54d05ca3ce3b733ca9e8e8721e0fa587dbc10489"}, - {file = "types_docutils-0.21.0.20240724-py3-none-any.whl", hash = "sha256:bf51c6c488d23c0412f9b3ba10686fb1a6cb0b957ef04b45128d8a55c79ebb00"}, + {file = "types-docutils-0.21.0.20240907.tar.gz", hash = "sha256:5dd2aa5e2e06fcfa090020bc4115479b4dd28da3329ab708563ee29894bd3c0d"}, + {file = "types_docutils-0.21.0.20240907-py3-none-any.whl", hash = "sha256:9c8ed6d90583944af00f6b5fa3aecc2101e20672f6b1a4a299c6bf7d1e47084d"}, ] [[package]] @@ -4299,13 +4290,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -4316,13 +4307,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.4" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, + {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, ] [package.dependencies] @@ -4498,101 +4489,103 @@ files = [ [[package]] name = "yarl" -version = "1.9.4" +version = "1.11.1" description = "Yet another URL library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2164cd9725092761fed26f299e3f276bb4b537ca58e6ff6b252eae9631b5c96e"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08ea567c16f140af8ddc7cb58e27e9138a1386e3e6e53982abaa6f2377b38cc"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:768ecc550096b028754ea28bf90fde071c379c62c43afa574edc6f33ee5daaec"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2909fa3a7d249ef64eeb2faa04b7957e34fefb6ec9966506312349ed8a7e77bf"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01a8697ec24f17c349c4f655763c4db70eebc56a5f82995e5e26e837c6eb0e49"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e286580b6511aac7c3268a78cdb861ec739d3e5a2a53b4809faef6b49778eaff"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4179522dc0305c3fc9782549175c8e8849252fefeb077c92a73889ccbcd508ad"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:27fcb271a41b746bd0e2a92182df507e1c204759f460ff784ca614e12dd85145"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f61db3b7e870914dbd9434b560075e0366771eecbe6d2b5561f5bc7485f39efd"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:c92261eb2ad367629dc437536463dc934030c9e7caca861cc51990fe6c565f26"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d95b52fbef190ca87d8c42f49e314eace4fc52070f3dfa5f87a6594b0c1c6e46"}, + {file = "yarl-1.11.1-cp310-cp310-win32.whl", hash = "sha256:489fa8bde4f1244ad6c5f6d11bb33e09cf0d1d0367edb197619c3e3fc06f3d91"}, + {file = "yarl-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:476e20c433b356e16e9a141449f25161e6b69984fb4cdbd7cd4bd54c17844998"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:946eedc12895873891aaceb39bceb484b4977f70373e0122da483f6c38faaa68"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:21a7c12321436b066c11ec19c7e3cb9aec18884fe0d5b25d03d756a9e654edfe"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c35f493b867912f6fda721a59cc7c4766d382040bdf1ddaeeaa7fa4d072f4675"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25861303e0be76b60fddc1250ec5986c42f0a5c0c50ff57cc30b1be199c00e63"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b53f73077e839b3f89c992223f15b1d2ab314bdbdf502afdc7bb18e95eae27"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:327c724b01b8641a1bf1ab3b232fb638706e50f76c0b5bf16051ab65c868fac5"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307d9a3417eea87715c9736d050c83e8c1904e9b7aada6ce61b46361b733d92"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a28bed68ab8fb7e380775f0029a079f08a17799cb3387a65d14ace16c12e2b"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:067b961853c8e62725ff2893226fef3d0da060656a9827f3f520fb1d19b2b68a"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8215f6f21394d1f46e222abeb06316e77ef328d628f593502d8fc2a9117bde83"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:498442e3af2a860a663baa14fbf23fb04b0dd758039c0e7c8f91cb9279799bff"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:69721b8effdb588cb055cc22f7c5105ca6fdaa5aeb3ea09021d517882c4a904c"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e969fa4c1e0b1a391f3fcbcb9ec31e84440253325b534519be0d28f4b6b533e"}, + {file = "yarl-1.11.1-cp311-cp311-win32.whl", hash = "sha256:7d51324a04fc4b0e097ff8a153e9276c2593106a811704025bbc1d6916f45ca6"}, + {file = "yarl-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:15061ce6584ece023457fb8b7a7a69ec40bf7114d781a8c4f5dcd68e28b5c53b"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a4264515f9117be204935cd230fb2a052dd3792789cc94c101c535d349b3dab0"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f41fa79114a1d2eddb5eea7b912d6160508f57440bd302ce96eaa384914cd265"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02da8759b47d964f9173c8675710720b468aa1c1693be0c9c64abb9d8d9a4867"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9361628f28f48dcf8b2f528420d4d68102f593f9c2e592bfc842f5fb337e44fd"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b91044952da03b6f95fdba398d7993dd983b64d3c31c358a4c89e3c19b6f7aef"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74db2ef03b442276d25951749a803ddb6e270d02dda1d1c556f6ae595a0d76a8"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e975a2211952a8a083d1b9d9ba26472981ae338e720b419eb50535de3c02870"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aef97ba1dd2138112890ef848e17d8526fe80b21f743b4ee65947ea184f07a2"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7915ea49b0c113641dc4d9338efa9bd66b6a9a485ffe75b9907e8573ca94b84"}, + {file = 
"yarl-1.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:504cf0d4c5e4579a51261d6091267f9fd997ef58558c4ffa7a3e1460bd2336fa"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3de5292f9f0ee285e6bd168b2a77b2a00d74cbcfa420ed078456d3023d2f6dff"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a34e1e30f1774fa35d37202bbeae62423e9a79d78d0874e5556a593479fdf239"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66b63c504d2ca43bf7221a1f72fbe981ff56ecb39004c70a94485d13e37ebf45"}, + {file = "yarl-1.11.1-cp312-cp312-win32.whl", hash = "sha256:a28b70c9e2213de425d9cba5ab2e7f7a1c8ca23a99c4b5159bf77b9c31251447"}, + {file = "yarl-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:17b5a386d0d36fb828e2fb3ef08c8829c1ebf977eef88e5367d1c8c94b454639"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1fa2e7a406fbd45b61b4433e3aa254a2c3e14c4b3186f6e952d08a730807fa0c"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:750f656832d7d3cb0c76be137ee79405cc17e792f31e0a01eee390e383b2936e"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b8486f322d8f6a38539136a22c55f94d269addb24db5cb6f61adc61eabc9d93"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fce4da3703ee6048ad4138fe74619c50874afe98b1ad87b2698ef95bf92c96d"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed653638ef669e0efc6fe2acb792275cb419bf9cb5c5049399f3556995f23c7"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18ac56c9dd70941ecad42b5a906820824ca72ff84ad6fa18db33c2537ae2e089"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688654f8507464745ab563b041d1fb7dab5d9912ca6b06e61d1c4708366832f5"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4973eac1e2ff63cf187073cd4e1f1148dcd119314ab79b88e1b3fad74a18c9d5"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:964a428132227edff96d6f3cf261573cb0f1a60c9a764ce28cda9525f18f7786"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6d23754b9939cbab02c63434776df1170e43b09c6a517585c7ce2b3d449b7318"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c2dc4250fe94d8cd864d66018f8344d4af50e3758e9d725e94fecfa27588ff82"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09696438cb43ea6f9492ef237761b043f9179f455f405279e609f2bc9100212a"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:999bfee0a5b7385a0af5ffb606393509cfde70ecca4f01c36985be6d33e336da"}, + {file = "yarl-1.11.1-cp313-cp313-win32.whl", hash = "sha256:ce928c9c6409c79e10f39604a7e214b3cb69552952fbda8d836c052832e6a979"}, + {file = "yarl-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:501c503eed2bb306638ccb60c174f856cc3246c861829ff40eaa80e2f0330367"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dae7bd0daeb33aa3e79e72877d3d51052e8b19c9025ecf0374f542ea8ec120e4"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3ff6b1617aa39279fe18a76c8d165469c48b159931d9b48239065767ee455b2b"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3257978c870728a52dcce8c2902bf01f6c53b65094b457bf87b2644ee6238ddc"}, + 
{file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f351fa31234699d6084ff98283cb1e852270fe9e250a3b3bf7804eb493bd937"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aef1b64da41d18026632d99a06b3fefe1d08e85dd81d849fa7c96301ed22f1b"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7175a87ab8f7fbde37160a15e58e138ba3b2b0e05492d7351314a250d61b1591"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba444bdd4caa2a94456ef67a2f383710928820dd0117aae6650a4d17029fa25e"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ea9682124fc062e3d931c6911934a678cb28453f957ddccf51f568c2f2b5e05"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8418c053aeb236b20b0ab8fa6bacfc2feaaf7d4683dd96528610989c99723d5f"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:61a5f2c14d0a1adfdd82258f756b23a550c13ba4c86c84106be4c111a3a4e413"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f3a6d90cab0bdf07df8f176eae3a07127daafcf7457b997b2bf46776da2c7eb7"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:077da604852be488c9a05a524068cdae1e972b7dc02438161c32420fb4ec5e14"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:15439f3c5c72686b6c3ff235279630d08936ace67d0fe5c8d5bbc3ef06f5a420"}, + {file = "yarl-1.11.1-cp38-cp38-win32.whl", hash = "sha256:238a21849dd7554cb4d25a14ffbfa0ef380bb7ba201f45b144a14454a72ffa5a"}, + {file = "yarl-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:67459cf8cf31da0e2cbdb4b040507e535d25cfbb1604ca76396a3a66b8ba37a6"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:884eab2ce97cbaf89f264372eae58388862c33c4f551c15680dd80f53c89a269"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a336eaa7ee7e87cdece3cedb395c9657d227bfceb6781295cf56abcd3386a26"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87f020d010ba80a247c4abc335fc13421037800ca20b42af5ae40e5fd75e7909"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637c7ddb585a62d4469f843dac221f23eec3cbad31693b23abbc2c366ad41ff4"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48dfd117ab93f0129084577a07287376cc69c08138694396f305636e229caa1a"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e0ae31fb5ccab6eda09ba1494e87eb226dcbd2372dae96b87800e1dcc98804"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f81501160c28d0c0b7333b4f7be8983dbbc161983b6fb814024d1b4952f79"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04293941646647b3bfb1719d1d11ff1028e9c30199509a844da3c0f5919dc520"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:250e888fa62d73e721f3041e3a9abf427788a1934b426b45e1b92f62c1f68366"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e8f63904df26d1a66aabc141bfd258bf738b9bc7bc6bdef22713b4f5ef789a4c"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:aac44097d838dda26526cffb63bdd8737a2dbdf5f2c68efb72ad83aec6673c7e"}, + {file = 
"yarl-1.11.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:267b24f891e74eccbdff42241c5fb4f974de2d6271dcc7d7e0c9ae1079a560d9"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6907daa4b9d7a688063ed098c472f96e8181733c525e03e866fb5db480a424df"}, + {file = "yarl-1.11.1-cp39-cp39-win32.whl", hash = "sha256:14438dfc5015661f75f85bc5adad0743678eefee266ff0c9a8e32969d5d69f74"}, + {file = "yarl-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:94d0caaa912bfcdc702a4204cd5e2bb01eb917fc4f5ea2315aa23962549561b0"}, + {file = "yarl-1.11.1-py3-none-any.whl", hash = "sha256:72bf26f66456baa0584eff63e44545c9f0eaed9b73cb6601b647c91f14c11f38"}, + {file = "yarl-1.11.1.tar.gz", hash = "sha256:1bb2d9e212fb7449b8fb73bc461b51eaa17cc8430b4a87d87be7b25052d92f53"}, ] [package.dependencies] @@ -4621,4 +4614,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "d50a2e23d473f9acf7dfc9fcacdaae05dfc85a9eb6f473e0d2d79b201781659a" +content-hash = "d9e288ecee5c65d0808fb002ae08194a74a38d416b4fb110a4efc470573452b6" diff --git a/pyproject.toml b/pyproject.toml index 4ff2362fb6a75..b70900f8dbae3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,7 @@ explicit_package_bases = true ignore_missing_imports = true mypy_path = "llama_index" namespace_packages = true +plugins = "pydantic.mypy" python_version = "3.8" [tool.poetry] @@ -44,22 +45,22 @@ name = "llama-index" packages = [{from = "_llama-index", include = "llama_index"}] readme = "README.md" repository = "https://github.com/run-llama/llama_index" -version = "0.11.1" +version = "0.11.9" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" llama-index-legacy = "^0.9.48" -llama-index-llms-openai = "^0.2.0" -llama-index-embeddings-openai = "^0.2.0" +llama-index-llms-openai = "^0.2.3" +llama-index-embeddings-openai = "^0.2.4" llama-index-program-openai = "^0.2.0" llama-index-question-gen-openai = "^0.2.0" -llama-index-agent-openai = "^0.3.0" +llama-index-agent-openai = "^0.3.1" llama-index-readers-file = "^0.2.0" -llama-index-readers-llama-parse = ">=0.2.0" +llama-index-readers-llama-parse = ">=0.3.0" llama-index-indices-managed-llama-cloud = ">=0.3.0" -llama-index-core = "^0.11.1" +llama-index-core = "^0.11.9" llama-index-multi-modal-llms-openai = "^0.2.0" -llama-index-cli = "^0.3.0" +llama-index-cli = "^0.3.1" nltk = ">3.8.1" # avoids a CVE, temp until next release, should be in llama-index-core [tool.poetry.group.dev.dependencies]