From e9038a9d4875c1f4d30f9dbb6b16a3b2544e088d Mon Sep 17 00:00:00 2001 From: fcogidi <41602287+fcogidi@users.noreply.github.com> Date: Mon, 26 Jan 2026 13:36:30 -0500 Subject: [PATCH 1/4] Refactor codebase to to package utility modules for distribution --- aieng-agents-utils/.python-version | 1 + aieng-agents-utils/README.md | 353 ++++++++++++++++++ aieng-agents-utils/aieng/agents/__init__.py | 30 ++ .../aieng/agents}/agent_session.py | 5 +- .../aieng/agents}/async_utils.py | 2 + .../aieng/agents}/client_manager.py | 8 +- .../aieng/agents/data/__init__.py | 7 + .../aieng/agents}/data/batching.py | 0 .../aieng/agents}/data/chunk_hf_dataset.py | 2 +- .../aieng/agents}/data/load_dataset.py | 0 .../aieng/agents}/data/pdf_to_hf_dataset.py | 9 +- .../aieng/agents}/env_vars.py | 0 .../aieng/agents}/gradio/__init__.py | 2 + .../aieng/agents}/gradio/messages.py | 14 +- .../aieng/agents/langfuse/__init__.py | 13 + .../aieng/agents}/langfuse/oai_sdk_setup.py | 3 +- .../aieng/agents}/langfuse/otlp_env_setup.py | 4 +- .../aieng/agents}/langfuse/shared_client.py | 7 +- .../aieng/agents}/logging.py | 3 + .../aieng/agents}/pretty_printing.py | 3 + aieng-agents-utils/aieng/agents/prompts.py | 123 ++++++ .../aieng/agents/py.typed | 0 .../aieng/agents}/tools/README.md | 2 +- .../aieng/agents/tools/__init__.py | 21 ++ .../aieng/agents}/tools/code_interpreter.py | 4 +- .../aieng/agents}/tools/gemini_grounding.py | 2 + .../aieng/agents}/tools/news_events.py | 5 +- .../aieng/agents/tools/weaviate_kb.py | 6 +- .../aieng/agents}/web_search/.dockerignore | 0 .../aieng/agents}/web_search/.env.example | 0 .../aieng/agents}/web_search/Dockerfile | 0 .../aieng/agents}/web_search/README.md | 11 +- .../aieng/agents/web_search/__init__.py | 1 + .../aieng/agents}/web_search/app.py | 0 .../aieng/agents}/web_search/auth.py | 0 .../aieng/agents}/web_search/daily_usage.py | 0 .../aieng/agents}/web_search/db.py | 0 .../agents}/web_search/requirements-app.txt | 10 +- 
.../agents}/web_search/requirements_app.in | 0 aieng-agents-utils/pyproject.toml | 57 +++ aieng-agents-utils/tests/README.md | 10 + .../tests/data}/test_load_hf.py | 7 +- .../tests}/example_files/example_a.csv | 0 .../tests/tools}/test_code_interpreter.py | 16 +- .../tests/tools}/test_gemini_grounding.py | 7 +- .../tests/tools}/test_get_news_events.py | 5 +- .../tests/tools}/test_weaviate.py | 16 +- .../tests/web_search}/test_web_search_auth.py | 5 +- pyproject.toml | 42 +-- src/1_basics/0_search_demo/app.py | 3 +- src/1_basics/1_react_rag/app.py | 5 +- src/1_basics/1_react_rag/cli.py | 9 +- src/2_frameworks/1_react_rag/app.py | 15 +- src/2_frameworks/1_react_rag/cli.py | 73 ++-- .../1_react_rag/langfuse_gradio.py | 22 +- src/2_frameworks/2_multi_agent/efficient.py | 32 +- .../2_multi_agent/efficient_multiple_kbs.py | 71 +--- src/2_frameworks/2_multi_agent/fan_out.py | 8 +- src/2_frameworks/2_multi_agent/verbose.py | 67 +--- src/2_frameworks/3_code_interpreter/app.py | 46 +-- src/2_frameworks/4_mcp/app.py | 20 +- src/3_evals/1_llm_judge/run_eval.py | 11 +- src/3_evals/1_llm_judge/upload_data.py | 8 +- src/3_evals/2_synthetic_data/README.md | 2 +- .../2_synthetic_data/annotate_diversity.py | 6 +- .../gradio_visualize_diversity.py | 6 +- .../2_synthetic_data/synthesize_data.py | 15 +- .../2_synthetic_data/synthesize_data_e2b.py | 18 +- src/prompts.py | 12 - src/utils/__init__.py | 17 - src/utils/data/__init__.py | 4 - src/utils/langfuse/trace_id.py | 11 - src/utils/tools/__init__.py | 2 - src/utils/trees.py | 24 -- tests/README.md | 7 - tests/tool_tests/test_integration.py | 16 +- uv.lock | 111 ++++-- 77 files changed, 951 insertions(+), 506 deletions(-) create mode 100644 aieng-agents-utils/.python-version create mode 100644 aieng-agents-utils/README.md create mode 100644 aieng-agents-utils/aieng/agents/__init__.py rename {src/utils => aieng-agents-utils/aieng/agents}/agent_session.py (94%) rename {src/utils => aieng-agents-utils/aieng/agents}/async_utils.py (97%) 
rename {src/utils => aieng-agents-utils/aieng/agents}/client_manager.py (95%) create mode 100644 aieng-agents-utils/aieng/agents/data/__init__.py rename {src/utils => aieng-agents-utils/aieng/agents}/data/batching.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/data/chunk_hf_dataset.py (98%) rename {src/utils => aieng-agents-utils/aieng/agents}/data/load_dataset.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/data/pdf_to_hf_dataset.py (99%) rename {src/utils => aieng-agents-utils/aieng/agents}/env_vars.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/gradio/__init__.py (88%) rename {src/utils => aieng-agents-utils/aieng/agents}/gradio/messages.py (98%) create mode 100644 aieng-agents-utils/aieng/agents/langfuse/__init__.py rename {src/utils => aieng-agents-utils/aieng/agents}/langfuse/oai_sdk_setup.py (94%) rename {src/utils => aieng-agents-utils/aieng/agents}/langfuse/otlp_env_setup.py (89%) rename {src/utils => aieng-agents-utils/aieng/agents}/langfuse/shared_client.py (82%) rename {src/utils => aieng-agents-utils/aieng/agents}/logging.py (96%) rename {src/utils => aieng-agents-utils/aieng/agents}/pretty_printing.py (93%) create mode 100644 aieng-agents-utils/aieng/agents/prompts.py rename src/utils/web_search/__init__.py => aieng-agents-utils/aieng/agents/py.typed (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/tools/README.md (65%) create mode 100644 aieng-agents-utils/aieng/agents/tools/__init__.py rename {src/utils => aieng-agents-utils/aieng/agents}/tools/code_interpreter.py (97%) rename {src/utils => aieng-agents-utils/aieng/agents}/tools/gemini_grounding.py (99%) rename {src/utils => aieng-agents-utils/aieng/agents}/tools/news_events.py (98%) rename src/utils/tools/kb_weaviate.py => aieng-agents-utils/aieng/agents/tools/weaviate_kb.py (97%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/.dockerignore (100%) rename {src/utils => 
aieng-agents-utils/aieng/agents}/web_search/.env.example (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/Dockerfile (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/README.md (95%) create mode 100644 aieng-agents-utils/aieng/agents/web_search/__init__.py rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/app.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/auth.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/daily_usage.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/db.py (100%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/requirements-app.txt (88%) rename {src/utils => aieng-agents-utils/aieng/agents}/web_search/requirements_app.in (100%) create mode 100644 aieng-agents-utils/pyproject.toml create mode 100644 aieng-agents-utils/tests/README.md rename {tests/data_tests => aieng-agents-utils/tests/data}/test_load_hf.py (82%) rename {tests/tool_tests => aieng-agents-utils/tests}/example_files/example_a.csv (100%) rename {tests/tool_tests => aieng-agents-utils/tests/tools}/test_code_interpreter.py (81%) rename {tests/tool_tests => aieng-agents-utils/tests/tools}/test_gemini_grounding.py (77%) rename {tests/tool_tests => aieng-agents-utils/tests/tools}/test_get_news_events.py (85%) rename {tests/tool_tests => aieng-agents-utils/tests/tools}/test_weaviate.py (79%) rename {tests => aieng-agents-utils/tests/web_search}/test_web_search_auth.py (99%) delete mode 100644 src/prompts.py delete mode 100644 src/utils/__init__.py delete mode 100644 src/utils/data/__init__.py delete mode 100644 src/utils/langfuse/trace_id.py delete mode 100644 src/utils/tools/__init__.py delete mode 100644 src/utils/trees.py delete mode 100644 tests/README.md diff --git a/aieng-agents-utils/.python-version b/aieng-agents-utils/.python-version new file mode 100644 index 0000000..e4fba21 --- /dev/null +++ 
b/aieng-agents-utils/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/aieng-agents-utils/README.md b/aieng-agents-utils/README.md new file mode 100644 index 0000000..624d02a --- /dev/null +++ b/aieng-agents-utils/README.md @@ -0,0 +1,353 @@ +# aieng-agents-utils + +A utility library for building AI agent applications with support for knowledge bases, code interpreter, web search, and observability. Built for the Vector Institute Agents Bootcamp +by the AI Engineering team. + +## Features + +### 🤖 Agent Tools + +- **Code Interpreter** - Execute Python code in isolated E2B sandboxes with file upload support +- **Gemini Grounding with Google Search** - Web search capabilities with citation tracking +- **Weaviate Knowledge Base** - Vector database integration for RAG applications +- **News Events** - Fetch structured current events from Wikipedia + +### 📊 Data Processing + +- **PDF to Dataset** - Convert PDF documents to HuggingFace datasets using multimodal OCR +- **Dataset Chunking** - Token-aware text chunking for embedding models +- **Dataset Loading** - Unified interface for loading datasets from multiple sources + +### 🔧 Utilities + +- **Async Client Manager** - Lifecycle management for async clients (OpenAI, Weaviate) +- **Progress Tracking** - Rich progress bars for async operations with rate limiting +- **Gradio Integration** - Message format conversion between Gradio and OpenAI SDK +- **Langfuse Integration** - OpenTelemetry-based observability and tracing +- **Environment Configuration** - Type-safe environment variable management with Pydantic +- **Session Management** - Persistent conversation sessions with SQLite backend + +## Installation + +### Using uv (recommended) + +```bash +uv pip install aieng-agents-utils +``` + +### Using pip + +```bash +pip install aieng-agents-utils +``` + +## Quick Start + +### Environment Setup + +Create a `.env` file with your API keys: + +```env +# Required for most features +OPENAI_API_KEY=your_openai_key +# or 
+GEMINI_API_KEY=your_gemini_key + +# For Weaviate knowledge base +WEAVIATE_API_KEY=your_weaviate_key +WEAVIATE_HTTP_HOST=your_instance.weaviate.cloud +WEAVIATE_GRPC_HOST=grpc-your_instance.weaviate.cloud + +# For code interpreter (optional) +E2B_API_KEY=your_e2b_key + +# For Langfuse observability (optional) +LANGFUSE_PUBLIC_KEY=pk-lf-xxx +LANGFUSE_SECRET_KEY=sk-lf-xxx + +# For embedding models (optional) +EMBEDDING_API_KEY=your_embedding_key +EMBEDDING_BASE_URL=https://your-embedding-service +``` + +### Basic Usage Examples + +#### Using Tools with OpenAI Agents SDK + +```python +from aieng.agents.tools import ( + CodeInterpreter, + AsyncWeaviateKnowledgeBase, + get_weaviate_async_client, +) +from aieng.agents import AsyncClientManager +import agents + +# Initialize client manager +manager = AsyncClientManager() + +# Create an agent with tools +agent = agents.Agent( + name="Research Assistant", + instructions="Help users with code and research questions.", + tools=[ + agents.function_tool(manager.knowledgebase.search_knowledgebase), + agents.function_tool(CodeInterpreter().run_code), + ], + model=agents.OpenAIChatCompletionsModel( + model="gpt-4o", + openai_client=manager.openai_client, + ), +) + +# Run the agent +response = await agents.Runner.run( + agent, + input="Search for information about transformers and create a visualization." 
+) + +# Clean up +await manager.close() +``` + +#### Using the Code Interpreter + +```python +from aieng.agents.tools import CodeInterpreter + +interpreter = CodeInterpreter( + template=" agents.SQLiteSession: """Get existing session or create a new one for conversation persistence.""" diff --git a/src/utils/async_utils.py b/aieng-agents-utils/aieng/agents/async_utils.py similarity index 97% rename from src/utils/async_utils.py rename to aieng-agents-utils/aieng/agents/async_utils.py index 8515083..56ad5cd 100644 --- a/src/utils/async_utils.py +++ b/aieng-agents-utils/aieng/agents/async_utils.py @@ -15,6 +15,8 @@ T = TypeVar("T") +__all__ = ["gather_with_progress", "rate_limited"] + async def indexed(index: int, coro: Coroutine[None, None, T]) -> tuple[int, T]: """Return (index, await coro).""" diff --git a/src/utils/client_manager.py b/aieng-agents-utils/aieng/agents/client_manager.py similarity index 95% rename from src/utils/client_manager.py rename to aieng-agents-utils/aieng/agents/client_manager.py index b2a5b27..916e1a8 100644 --- a/src/utils/client_manager.py +++ b/aieng-agents-utils/aieng/agents/client_manager.py @@ -5,12 +5,14 @@ hot-reload process. """ +from aieng.agents.env_vars import Configs +from aieng.agents.tools.weaviate_kb import ( + AsyncWeaviateKnowledgeBase, + get_weaviate_async_client, +) from openai import AsyncOpenAI from weaviate.client import WeaviateAsyncClient -from .env_vars import Configs -from .tools.kb_weaviate import AsyncWeaviateKnowledgeBase, get_weaviate_async_client - class AsyncClientManager: """Manages async client lifecycle with lazy initialization and cleanup. 
diff --git a/aieng-agents-utils/aieng/agents/data/__init__.py b/aieng-agents-utils/aieng/agents/data/__init__.py new file mode 100644 index 0000000..1bf807a --- /dev/null +++ b/aieng-agents-utils/aieng/agents/data/__init__.py @@ -0,0 +1,7 @@ +"""Utilities for handling data.""" + +from aieng.agents.data.batching import create_batches +from aieng.agents.data.load_dataset import get_dataset, get_dataset_url_hash + + +__all__ = ["create_batches", "get_dataset", "get_dataset_url_hash"] diff --git a/src/utils/data/batching.py b/aieng-agents-utils/aieng/agents/data/batching.py similarity index 100% rename from src/utils/data/batching.py rename to aieng-agents-utils/aieng/agents/data/batching.py diff --git a/src/utils/data/chunk_hf_dataset.py b/aieng-agents-utils/aieng/agents/data/chunk_hf_dataset.py similarity index 98% rename from src/utils/data/chunk_hf_dataset.py rename to aieng-agents-utils/aieng/agents/data/chunk_hf_dataset.py index 3e0279a..19f0737 100644 --- a/src/utils/data/chunk_hf_dataset.py +++ b/aieng-agents-utils/aieng/agents/data/chunk_hf_dataset.py @@ -1,4 +1,4 @@ -"""Script to chunk text data from a HuggingFace dataset.""" +"""Script for chunking text data from a HuggingFace dataset.""" import os from collections import defaultdict diff --git a/src/utils/data/load_dataset.py b/aieng-agents-utils/aieng/agents/data/load_dataset.py similarity index 100% rename from src/utils/data/load_dataset.py rename to aieng-agents-utils/aieng/agents/data/load_dataset.py diff --git a/src/utils/data/pdf_to_hf_dataset.py b/aieng-agents-utils/aieng/agents/data/pdf_to_hf_dataset.py similarity index 99% rename from src/utils/data/pdf_to_hf_dataset.py rename to aieng-agents-utils/aieng/agents/data/pdf_to_hf_dataset.py index 931bcaa..68c683c 100644 --- a/src/utils/data/pdf_to_hf_dataset.py +++ b/aieng-agents-utils/aieng/agents/data/pdf_to_hf_dataset.py @@ -13,11 +13,10 @@ Examples -------- Transcribe a single PDF and save to ``hf_dataset``: - uv run --env-file .env 
src/utils/data/pdf_to_hf_dataset.py \ - --input-path ./docs/example.pdf + uv run --env-file .env pdf_to_hf_dataset.py --input-path ./docs/example.pdf Transcribe a folder recursively with a smaller DPI and a custom output: - uv run --env-file .env src/utils/data/pdf_to_hf_dataset.py \ + uv run --env-file .env pdf_to_hf_dataset.py \ --input-path ./docs --recursive --dpi 150 --output-dir ./out_dataset Notes @@ -789,6 +788,8 @@ def main( hub_repo_id: str | None, ) -> None: """Convert PDFs to a chunked HuggingFace dataset.""" + load_dotenv() + if chunk_overlap >= chunk_size: raise ValueError("chunk_overlap must be smaller than chunk_size.") @@ -847,6 +848,4 @@ def main( if __name__ == "__main__": - load_dotenv() - main() diff --git a/src/utils/env_vars.py b/aieng-agents-utils/aieng/agents/env_vars.py similarity index 100% rename from src/utils/env_vars.py rename to aieng-agents-utils/aieng/agents/env_vars.py diff --git a/src/utils/gradio/__init__.py b/aieng-agents-utils/aieng/agents/gradio/__init__.py similarity index 88% rename from src/utils/gradio/__init__.py rename to aieng-agents-utils/aieng/agents/gradio/__init__.py index f89efb6..3c1b2e0 100644 --- a/src/utils/gradio/__init__.py +++ b/aieng-agents-utils/aieng/agents/gradio/__init__.py @@ -1,3 +1,5 @@ +"""Utilities for managing Gradio interface.""" + import gradio as gr diff --git a/src/utils/gradio/messages.py b/aieng-agents-utils/aieng/agents/gradio/messages.py similarity index 98% rename from src/utils/gradio/messages.py rename to aieng-agents-utils/aieng/agents/gradio/messages.py index 096217f..5a049da 100644 --- a/src/utils/gradio/messages.py +++ b/aieng-agents-utils/aieng/agents/gradio/messages.py @@ -6,19 +6,27 @@ from typing import TYPE_CHECKING import gradio as gr -from agents import StreamEvent, stream_events -from agents.items import MessageOutputItem, RunItem, ToolCallItem, ToolCallOutputItem from gradio.components.chatbot import ChatMessage, MetadataDict from openai.types.responses import 
ResponseFunctionToolCall, ResponseOutputText from openai.types.responses.response_completed_event import ResponseCompletedEvent from openai.types.responses.response_output_message import ResponseOutputMessage from PIL import Image +from agents import StreamEvent, stream_events +from agents.items import MessageOutputItem, RunItem, ToolCallItem, ToolCallOutputItem + if TYPE_CHECKING: from openai.types.chat import ChatCompletionMessageParam +__all__ = [ + "gradio_messages_to_oai_chat", + "oai_agent_items_to_gradio_messages", + "oai_agent_stream_to_gradio_messages", +] + + def gradio_messages_to_oai_chat( messages: list[ChatMessage | dict], ) -> list["ChatCompletionMessageParam"]: @@ -169,7 +177,6 @@ def oai_agent_stream_to_gradio_messages( if isinstance(stream_event, stream_events.RawResponsesStreamEvent): data = stream_event.data if isinstance(data, ResponseCompletedEvent): - print(stream_event) # The completed event may contain multiple output messages, # including tool calls and final outputs. # If there is at least one tool call, we mark the response as a thought. 
@@ -210,7 +217,6 @@ def oai_agent_stream_to_gradio_messages( item = stream_event.item if name == "tool_output" and isinstance(item, ToolCallOutputItem): - print(stream_event) text_content, images = _process_tool_output_for_images(item.output) output.append( diff --git a/aieng-agents-utils/aieng/agents/langfuse/__init__.py b/aieng-agents-utils/aieng/agents/langfuse/__init__.py new file mode 100644 index 0000000..daea837 --- /dev/null +++ b/aieng-agents-utils/aieng/agents/langfuse/__init__.py @@ -0,0 +1,13 @@ +"""Utilities for Langfuse integration.""" + +from aieng.agents.langfuse.oai_sdk_setup import setup_langfuse_tracer +from aieng.agents.langfuse.otlp_env_setup import set_up_langfuse_otlp_env_vars +from aieng.agents.langfuse.shared_client import flush_langfuse, langfuse_client + + +__all__ = [ + "flush_langfuse", + "langfuse_client", + "set_up_langfuse_otlp_env_vars", + "setup_langfuse_tracer", +] diff --git a/src/utils/langfuse/oai_sdk_setup.py b/aieng-agents-utils/aieng/agents/langfuse/oai_sdk_setup.py similarity index 94% rename from src/utils/langfuse/oai_sdk_setup.py rename to aieng-agents-utils/aieng/agents/langfuse/oai_sdk_setup.py index 8432cc3..53c5c57 100644 --- a/src/utils/langfuse/oai_sdk_setup.py +++ b/aieng-agents-utils/aieng/agents/langfuse/oai_sdk_setup.py @@ -6,13 +6,12 @@ import logfire import nest_asyncio +from aieng.agents.langfuse.otlp_env_setup import set_up_langfuse_otlp_env_vars from opentelemetry import trace from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import SimpleSpanProcessor -from .otlp_env_setup import set_up_langfuse_otlp_env_vars - def configure_oai_agents_sdk(service_name: str) -> None: """Register Langfuse as tracing provider for OAI Agents SDK.""" diff --git a/src/utils/langfuse/otlp_env_setup.py b/aieng-agents-utils/aieng/agents/langfuse/otlp_env_setup.py similarity index 89% rename from 
src/utils/langfuse/otlp_env_setup.py rename to aieng-agents-utils/aieng/agents/langfuse/otlp_env_setup.py index 68d828f..e1346ea 100644 --- a/src/utils/langfuse/otlp_env_setup.py +++ b/aieng-agents-utils/aieng/agents/langfuse/otlp_env_setup.py @@ -4,10 +4,10 @@ import logging import os -from ..env_vars import Configs +from aieng.agents.env_vars import Configs -def set_up_langfuse_otlp_env_vars(): +def set_up_langfuse_otlp_env_vars() -> None: """Set up environment variables for Langfuse OpenTelemetry integration. OTLP = OpenTelemetry Protocol. diff --git a/src/utils/langfuse/shared_client.py b/aieng-agents-utils/aieng/agents/langfuse/shared_client.py similarity index 82% rename from src/utils/langfuse/shared_client.py rename to aieng-agents-utils/aieng/agents/langfuse/shared_client.py index bc2c16a..7794de1 100644 --- a/src/utils/langfuse/shared_client.py +++ b/aieng-agents-utils/aieng/agents/langfuse/shared_client.py @@ -2,13 +2,12 @@ from os import getenv +from aieng.agents.env_vars import Configs from langfuse import Langfuse from rich.progress import Progress, SpinnerColumn, TextColumn -from ..env_vars import Configs - -__all__ = ["langfuse_client"] +__all__ = ["flush_langfuse", "langfuse_client"] config = Configs() @@ -18,7 +17,7 @@ ) -def flush_langfuse(client: "Langfuse | None" = None): +def flush_langfuse(client: "Langfuse | None" = None) -> None: """Flush shared LangFuse Client. Rich Progress included.""" if client is None: client = langfuse_client diff --git a/src/utils/logging.py b/aieng-agents-utils/aieng/agents/logging.py similarity index 96% rename from src/utils/logging.py rename to aieng-agents-utils/aieng/agents/logging.py index a0264e2..3b7d37d 100644 --- a/src/utils/logging.py +++ b/aieng-agents-utils/aieng/agents/logging.py @@ -4,6 +4,9 @@ import warnings +__all__ = ["set_up_logging"] + + class IgnoreOpenAI401Filter(logging.Filter): """ A logging filter that excludes specific OpenAI client error messages. 
diff --git a/src/utils/pretty_printing.py b/aieng-agents-utils/aieng/agents/pretty_printing.py similarity index 93% rename from src/utils/pretty_printing.py rename to aieng-agents-utils/aieng/agents/pretty_printing.py index fdb5557..e8320a1 100644 --- a/src/utils/pretty_printing.py +++ b/aieng-agents-utils/aieng/agents/pretty_printing.py @@ -6,6 +6,9 @@ import pydantic +__all__ = ["pretty_print"] + + def _serializer(item: Any) -> dict[str, Any] | str: """Serialize using heuristics.""" if isinstance(item, pydantic.BaseModel): diff --git a/aieng-agents-utils/aieng/agents/prompts.py b/aieng-agents-utils/aieng/agents/prompts.py new file mode 100644 index 0000000..44b8258 --- /dev/null +++ b/aieng-agents-utils/aieng/agents/prompts.py @@ -0,0 +1,123 @@ +"""Centralized location for all system prompts.""" + +REACT_INSTRUCTIONS = """\ +Answer the question using the search tool. \ +EACH TIME before invoking the function, you must explain your reasons for doing so. \ +Be sure to mention the sources in your response. \ +If the search tool did not return intended results, try again. \ +For best performance, divide complex queries into simpler sub-queries. \ +Do not make up information. \ +For facts that might change over time, you must use the search tool to retrieve the \ +most up-to-date information. +""" + +CODE_INTERPRETER_INSTRUCTIONS = """\ +The `code_interpreter` tool executes Python commands. \ +Please note that data is not persisted. Each time you invoke this tool, \ +you will need to run import and define all variables from scratch. + +You can access the local filesystem using this tool. \ +Instead of asking the user for file inputs, you should try to find the file \ +using this tool. + +Recommended packages: Pandas, Numpy, SymPy, Scikit-learn, Matplotlib, Seaborn. + +Use Matplotlib to create visualizations. Make sure to call `plt.show()` so that +the plot is captured and returned to the user. 
+ +You can also run Jupyter-style shell commands (e.g., `!pip freeze`) +but you won't be able to install packages. +""" + +SEARCH_AGENT_INSTRUCTIONS = """\ +You are a search agent. You receive a single search query as input. \ +Use the search tool to perform a search, then produce a concise \ +'search summary' of the key findings. \ +For every fact you include in the summary, ALWAYS include a citation \ +both in-line and at the end of the summary as a numbered list. The \ +citation at the end should include relevant metadata from the search \ +results. Do NOT return raw search results. " +""" + +WIKI_SEARCH_PLANNER_INSTRUCTIONS = """\ +You are a research planner. \ +Given a user's query, produce a list of search terms that can be used to retrieve +relevant information from a knowledge base to answer the question. \ +As you are not able to clarify from the user what they are looking for, \ +your search terms should be broad and cover various aspects of the query. \ +Output up to 10 search terms to query the knowledge base. \ +Note that the knowledge base is a Wikipedia dump and cuts off at May 2025. +""" + +KB_RESEARCHER_INSTRUCTIONS = """\ +You are a research assistant with access to a knowledge base. \ +Given a potentially broad search term, your task is to use the search tool to \ +retrieve relevant information from the knowledge base and produce a short \ +summary of at most 300 words. You must pass the initial search term directly to \ +the search tool without any modifications and, only if necessary, refine your \ +search based on the results you get back. Your summary must be based solely on \ +a synthesis of all the search results and should not include any information that \ +is not present in the search results. For every fact you include in the summary, \ +ALWAYS include a citation both in-line and at the end of the summary as a numbered \ +list. The citation at the end should include relevant metadata from the search \ +results. 
Do NOT return raw search results. +""" + +WRITER_INSTRUCTIONS = """\ +You are an expert at synthesizing information and writing coherent reports. \ +Given a user's query and a set of search summaries, synthesize these into a \ +coherent report that answers the user's question. The length of the report should be \ +proportional to the complexity of the question. For queries that are more complex, \ +ensure that the report is well-structured, with clear sections and headings where \ +appropriate. Make sure to use the citations from the search summaries to back up \ +any factual claims you make. \ +Do not make up any information outside of the search summaries. +""" + +WIKI_AND_WEB_ORCHESTRATOR_INSTRUCTIONS = """\ +You are a deep research agent and your goal is to conduct in-depth, multi-turn +research by breaking down complex queries, using the provided tools, and +synthesizing the information into a comprehensive report. + +You have access to the following tools: +1. 'search_knowledgebase' - use this tool to search for information in a + knowledge base. The knowledge base reflects a subset of Wikipedia as + of May 2025. +2. 'get_web_search_grounded_response' - use this tool for current events, + news, fact-checking or when the information in the knowledge base is + not sufficient to answer the question. + +Both tools will not return raw search results or the sources themselves. +Instead, they will return a concise summary of the key findings, along +with the sources used to generate the summary. + +For best performance, divide complex queries into simpler sub-queries +Before calling either tool, always explain your reasoning for doing so. + +Note that the 'get_web_search_grounded_response' tool will expand the query +into multiple search queries and execute them. It will also return the +queries it executed. Do not repeat them. 
+ +**Routing Guidelines:** +- When answering a question, you should first try to use the 'search_knowledgebase' +tool, unless the question requires recent information after May 2025 or +has explicit recency cues. +- If either tool returns insufficient information for a given query, try +reformulating or using the other tool. You can call either tool multiple +times to get the information you need to answer the user's question. + +**Guidelines for synthesis** +- After collecting results, write the final answer from your own synthesis. +- Add a "Sources" section listing unique sources, formatted as: + [1] Publisher - URL + [2] Wikipedia: (Section:
) +Order by first mention in your text. Every factual sentence in your final +response must map to at least one source. +- If web and knowledge base disagree, surface the disagreement and prefer sources +with newer publication dates. +- Do not invent URLs or sources. +- If both tools fail, say so and suggest 2–3 refined queries. + +Be sure to mention the sources in your response, including the URL if available, +and do not make up information. +""" diff --git a/src/utils/web_search/__init__.py b/aieng-agents-utils/aieng/agents/py.typed similarity index 100% rename from src/utils/web_search/__init__.py rename to aieng-agents-utils/aieng/agents/py.typed diff --git a/src/utils/tools/README.md b/aieng-agents-utils/aieng/agents/tools/README.md similarity index 65% rename from src/utils/tools/README.md rename to aieng-agents-utils/aieng/agents/tools/README.md index bc4816e..1da26ae 100644 --- a/src/utils/tools/README.md +++ b/aieng-agents-utils/aieng/agents/tools/README.md @@ -4,5 +4,5 @@ This module contains various tools for LLM agents. 
```bash # Tool for getting a list of recent news headlines from enwiki -uv run --env-file .env python3 src/utils/tools/news_events.py +uv run --env-file .env python3 aieng-agent-utils/aieng/tools/news_events.py ``` diff --git a/aieng-agents-utils/aieng/agents/tools/__init__.py b/aieng-agents-utils/aieng/agents/tools/__init__.py new file mode 100644 index 0000000..79a00cc --- /dev/null +++ b/aieng-agents-utils/aieng/agents/tools/__init__.py @@ -0,0 +1,21 @@ +"""Reusable tools for AI agents.""" + +from aieng.agents.tools.code_interpreter import CodeInterpreter, CodeInterpreterOutput +from aieng.agents.tools.gemini_grounding import GeminiGroundingWithGoogleSearch +from aieng.agents.tools.news_events import CurrentEvents, NewsEvent, get_news_events +from aieng.agents.tools.weaviate_kb import ( + AsyncWeaviateKnowledgeBase, + get_weaviate_async_client, +) + + +__all__ = [ + "CodeInterpreter", + "CodeInterpreterOutput", + "GeminiGroundingWithGoogleSearch", + "AsyncWeaviateKnowledgeBase", + "get_weaviate_async_client", + "CurrentEvents", + "NewsEvent", + "get_news_events", +] diff --git a/src/utils/tools/code_interpreter.py b/aieng-agents-utils/aieng/agents/tools/code_interpreter.py similarity index 97% rename from src/utils/tools/code_interpreter.py rename to aieng-agents-utils/aieng/agents/tools/code_interpreter.py index 6057e54..3565b51 100644 --- a/src/utils/tools/code_interpreter.py +++ b/aieng-agents-utils/aieng/agents/tools/code_interpreter.py @@ -4,11 +4,13 @@ from pathlib import Path from typing import Sequence +from aieng.agents.async_utils import gather_with_progress from e2b_code_interpreter import AsyncSandbox from e2b_code_interpreter.models import serialize_results from pydantic import BaseModel -from ..async_utils import gather_with_progress + +__all__ = ["CodeInterpreter", "CodeInterpreterOutput"] class _CodeInterpreterOutputError(BaseModel): diff --git a/src/utils/tools/gemini_grounding.py b/aieng-agents-utils/aieng/agents/tools/gemini_grounding.py 
similarity index 99% rename from src/utils/tools/gemini_grounding.py rename to aieng-agents-utils/aieng/agents/tools/gemini_grounding.py index 5bc95be..af6a836 100644 --- a/src/utils/tools/gemini_grounding.py +++ b/aieng-agents-utils/aieng/agents/tools/gemini_grounding.py @@ -13,6 +13,8 @@ RETRYABLE_STATUS = {429, 500, 502, 503, 504} +__all__ = ["GeminiGroundingWithGoogleSearch", "GroundedResponse"] + class ModelSettings(BaseModel): """Configuration for the Gemini model used for web search.""" diff --git a/src/utils/tools/news_events.py b/aieng-agents-utils/aieng/agents/tools/news_events.py similarity index 98% rename from src/utils/tools/news_events.py rename to aieng-agents-utils/aieng/agents/tools/news_events.py index 06a5a94..fae5d9e 100644 --- a/src/utils/tools/news_events.py +++ b/aieng-agents-utils/aieng/agents/tools/news_events.py @@ -1,8 +1,6 @@ #!/usr/bin/env python3 """Fetch and parse Wikipedia Current Events into structured data using Pydantic.""" -from __future__ import annotations - import argparse import asyncio import random @@ -16,6 +14,9 @@ from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn +__all__ = ["get_news_events", "NewsEvent", "CurrentEvents"] + + class NewsEvent(BaseModel): """Represents a single current event item.""" diff --git a/src/utils/tools/kb_weaviate.py b/aieng-agents-utils/aieng/agents/tools/weaviate_kb.py similarity index 97% rename from src/utils/tools/kb_weaviate.py rename to aieng-agents-utils/aieng/agents/tools/weaviate_kb.py index d8c45e3..1c3c466 100644 --- a/src/utils/tools/kb_weaviate.py +++ b/aieng-agents-utils/aieng/agents/tools/weaviate_kb.py @@ -8,10 +8,12 @@ import openai import pydantic import weaviate +from aieng.agents.async_utils import rate_limited +from aieng.agents.env_vars import Configs from weaviate import WeaviateAsyncClient -from ..async_utils import rate_limited -from ..env_vars import Configs + +__all__ = ["AsyncWeaviateKnowledgeBase", "get_weaviate_async_client"] class 
_Source(pydantic.BaseModel): diff --git a/src/utils/web_search/.dockerignore b/aieng-agents-utils/aieng/agents/web_search/.dockerignore similarity index 100% rename from src/utils/web_search/.dockerignore rename to aieng-agents-utils/aieng/agents/web_search/.dockerignore diff --git a/src/utils/web_search/.env.example b/aieng-agents-utils/aieng/agents/web_search/.env.example similarity index 100% rename from src/utils/web_search/.env.example rename to aieng-agents-utils/aieng/agents/web_search/.env.example diff --git a/src/utils/web_search/Dockerfile b/aieng-agents-utils/aieng/agents/web_search/Dockerfile similarity index 100% rename from src/utils/web_search/Dockerfile rename to aieng-agents-utils/aieng/agents/web_search/Dockerfile diff --git a/src/utils/web_search/README.md b/aieng-agents-utils/aieng/agents/web_search/README.md similarity index 95% rename from src/utils/web_search/README.md rename to aieng-agents-utils/aieng/agents/web_search/README.md index 90bb5e8..ccebf4e 100644 --- a/src/utils/web_search/README.md +++ b/aieng-agents-utils/aieng/agents/web_search/README.md @@ -1,6 +1,6 @@ # Gemini Grounding Proxy -This service packages the code in `src/utils/web_search` into a FastAPI +This service packages the code in `aieng-agents-utils/aieng/agents/web_search` into a FastAPI application. It plays a dual role in the Agent Bootcamp project: - **Agent tooling showcase.** The proxy demonstrates how you can wrap a third-party @@ -24,6 +24,7 @@ production deployment on Google Cloud Run. - Access to a Google Cloud project with billing enabled Recommended: + - `uv` or `pip` for dependency management - Ability to set environment variables from `.env` files @@ -96,10 +97,10 @@ Keep `.env.example` up to date so teammates can copy it into their own `.env`. 
```bash python -m venv .venv source .venv/bin/activate - pip install -r src/utils/web_search/requirements-app.txt + pip install -r aieng-agents-utils/aieng/agents/web_search/requirements-app.txt ``` - (Or use `uv pip install -r src/utils/web_search/requirements-app.txt`.) + (Or use `uv pip install -r aieng-agents-utils/aieng/agents/web_search/requirements-app.txt`.) 5. **Run unit tests** @@ -112,7 +113,7 @@ Keep `.env.example` up to date so teammates can copy it into their own `.env`. ```bash uvicorn utils.web_search.app:app \ --reload \ - --reload-dir src/utils/web_search \ + --reload-dir aieng-agents-utils/aieng/agents/web_search \ --port 8080 ``` @@ -224,7 +225,7 @@ export REGION=us-central1 export IMAGE_NAME=grounding-proxy export TAG=$(date +%Y%m%d%H%M) -gcloud builds submit src/utils/web_search \ +gcloud builds submit aieng-agents-utils/aieng/agents/web_search \ --tag "$REGION-docker.pkg.dev/$PROJECT/web-search/$IMAGE_NAME:$TAG" ``` diff --git a/aieng-agents-utils/aieng/agents/web_search/__init__.py b/aieng-agents-utils/aieng/agents/web_search/__init__.py new file mode 100644 index 0000000..cdf5621 --- /dev/null +++ b/aieng-agents-utils/aieng/agents/web_search/__init__.py @@ -0,0 +1 @@ +"""Implementation of proxy service for Gemini web grounding with Google Search.""" diff --git a/src/utils/web_search/app.py b/aieng-agents-utils/aieng/agents/web_search/app.py similarity index 100% rename from src/utils/web_search/app.py rename to aieng-agents-utils/aieng/agents/web_search/app.py diff --git a/src/utils/web_search/auth.py b/aieng-agents-utils/aieng/agents/web_search/auth.py similarity index 100% rename from src/utils/web_search/auth.py rename to aieng-agents-utils/aieng/agents/web_search/auth.py diff --git a/src/utils/web_search/daily_usage.py b/aieng-agents-utils/aieng/agents/web_search/daily_usage.py similarity index 100% rename from src/utils/web_search/daily_usage.py rename to aieng-agents-utils/aieng/agents/web_search/daily_usage.py diff --git 
a/src/utils/web_search/db.py b/aieng-agents-utils/aieng/agents/web_search/db.py similarity index 100% rename from src/utils/web_search/db.py rename to aieng-agents-utils/aieng/agents/web_search/db.py diff --git a/src/utils/web_search/requirements-app.txt b/aieng-agents-utils/aieng/agents/web_search/requirements-app.txt similarity index 88% rename from src/utils/web_search/requirements-app.txt rename to aieng-agents-utils/aieng/agents/web_search/requirements-app.txt index 2063260..3d5634e 100644 --- a/src/utils/web_search/requirements-app.txt +++ b/aieng-agents-utils/aieng/agents/web_search/requirements-app.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile src/utils/web_search/requirements_app.in -o src/utils/web_search/requirements-app.txt +# uv pip compile aieng-agents-utils/aieng/agents/web_search/requirements_app.in -o aieng-agents-utils/aieng/agents/web_search/requirements-app.txt annotated-doc==0.0.3 # via fastapi annotated-types==0.7.0 @@ -32,7 +32,7 @@ email-validator==2.3.0 # fastapi # pydantic fastapi==0.120.2 - # via -r src/utils/web_search/requirements_app.in + # via -r aieng-agents-utils/aieng/agents/web_search/requirements_app.in fastapi-cli==0.0.14 # via fastapi fastapi-cloud-cli==0.3.1 @@ -50,9 +50,9 @@ google-auth==2.42.0 google-cloud-core==2.4.3 # via google-cloud-firestore google-cloud-firestore==2.21.0 - # via -r src/utils/web_search/requirements_app.in + # via -r aieng-agents-utils/aieng/agents/web_search/requirements_app.in google-genai==1.46.0 - # via -r src/utils/web_search/requirements_app.in + # via -r aieng-agents-utils/aieng/agents/web_search/requirements_app.in googleapis-common-protos==1.71.0 # via # google-api-core @@ -109,7 +109,7 @@ pyasn1-modules==0.4.2 # via google-auth pydantic==2.12.3 # via - # -r src/utils/web_search/requirements_app.in + # -r aieng-agents-utils/aieng/agents/web_search/requirements_app.in # fastapi # fastapi-cloud-cli # google-genai diff --git 
a/src/utils/web_search/requirements_app.in b/aieng-agents-utils/aieng/agents/web_search/requirements_app.in similarity index 100% rename from src/utils/web_search/requirements_app.in rename to aieng-agents-utils/aieng/agents/web_search/requirements_app.in diff --git a/aieng-agents-utils/pyproject.toml b/aieng-agents-utils/pyproject.toml new file mode 100644 index 0000000..a370395 --- /dev/null +++ b/aieng-agents-utils/pyproject.toml @@ -0,0 +1,57 @@ +[project] +name = "aieng-agents-utils" +version = "0.1.0" +description = "Helper modules for Vector Institute AI Engineering Agents Bootcamp implementations" +authors = [{name = "Vector AI Engineering", email = "ai_engineering@vectorinstitute.ai"}] +requires-python = ">=3.12" +readme = "README.md" +license = "MIT" +dependencies = [ + "backoff>=2.2.1", + "beautifulsoup4>=4.13.4", + "click>=8.3.0", + "datasets>=4.4.0", + "e2b-code-interpreter>=2.3.0", + "fastapi[standard]>=0.116.1", + "google-cloud-firestore>=2.21.0", + "google-genai>=1.46.0", + "gradio>=6.1.0", + "httpx>=0.28.1", + "langfuse>=3.9.0", + "lxml>=6.0.0", + "nest-asyncio>=1.6.0", + "openai>=2.6.0", + "openai-agents>=0.4.0", + "pandas>=2.3.3", + "pillow>=11.3.0", + "pydantic>=2.11.7", + "pydantic-ai-slim[logfire]>=0.3.7", + "pymupdf>=1.26.7", + "simplejson>=3.20.2", + "transformers>=4.54.1", + "weaviate-client>=4.15.4", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.sdist] +include = ["aieng/"] + +[tool.hatch.build.targets.wheel] +include = ["aieng/"] + +[dependency-groups] +dev = [ + "pytest>=8.3.4", + "pytest-asyncio>=1.2.0", +] + +# Default dependency groups to be installed +[tool.uv] +default-groups = ["dev"] + +[project.scripts] +pdf_to_hf_dataset = "aieng.agents.data.pdf_to_hf_dataset:main" +chunk_hf_dataset = "aieng.agents.data.chunk_hf_dataset:main" diff --git a/aieng-agents-utils/tests/README.md b/aieng-agents-utils/tests/README.md new file mode 100644 index 0000000..36ecc16 --- 
/dev/null
+++ b/aieng-agents-utils/tests/README.md
@@ -0,0 +1,10 @@
+# Unit tests
+
+```bash
+uv run --env-file .env pytest -sv aieng-agents-utils/tests/data/test_load_hf.py
+uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_weaviate.py
+uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_code_interpreter.py
+uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_gemini_grounding.py
+uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_get_news_events.py
+uv run --env-file .env pytest -sv aieng-agents-utils/tests/web_search/test_web_search_auth.py
+```
diff --git a/tests/data_tests/test_load_hf.py b/aieng-agents-utils/tests/data/test_load_hf.py
similarity index 82%
rename from tests/data_tests/test_load_hf.py
rename to aieng-agents-utils/tests/data/test_load_hf.py
index f2e6e64..0c8ac6b 100644
--- a/tests/data_tests/test_load_hf.py
+++ b/aieng-agents-utils/tests/data/test_load_hf.py
@@ -1,11 +1,10 @@
 """Test Loading HuggingFace datasets."""
 
 import pandas as pd
+from aieng.agents.data import get_dataset, get_dataset_url_hash
 
-from src.utils.data import get_dataset, get_dataset_url_hash
-
 
-def test_load_from_hub_unspecified_subset():
+def test_load_from_hub_unspecified_subset() -> None:
     """Test loading dataset from hub, no subset specified."""
     url = "hf://vector-institute/hotpotqa@d997ecf:train"
     rows_limit = 18
@@ -15,7 +14,7 @@ def test_load_from_hub_unspecified_subset():
     assert len(dataset) == rows_limit
 
 
-def test_load_from_hub_named_subset():
+def test_load_from_hub_named_subset() -> None:
     """Test loading dataset from hub, no subset specified."""
     url = "hf://vector-institute/hotpotqa@d997ecf:train"
     rows_limit = 18
diff --git a/tests/tool_tests/example_files/example_a.csv b/aieng-agents-utils/tests/example_files/example_a.csv
similarity index 100%
rename from tests/tool_tests/example_files/example_a.csv
rename to aieng-agents-utils/tests/example_files/example_a.csv
diff --git
a/tests/tool_tests/test_code_interpreter.py b/aieng-agents-utils/tests/tools/test_code_interpreter.py similarity index 81% rename from tests/tool_tests/test_code_interpreter.py rename to aieng-agents-utils/tests/tools/test_code_interpreter.py index 6318c7b..ad7cfa8 100644 --- a/tests/tool_tests/test_code_interpreter.py +++ b/aieng-agents-utils/tests/tools/test_code_interpreter.py @@ -3,12 +3,8 @@ from pathlib import Path import pytest - -from src.utils import pretty_print -from src.utils.tools.code_interpreter import ( - CodeInterpreter, - CodeInterpreterOutput, -) +from aieng.agents import pretty_print +from aieng.agents.tools.code_interpreter import CodeInterpreter, CodeInterpreterOutput PANDAS_VERSION_SCRIPT = """\ @@ -28,7 +24,7 @@ @pytest.mark.asyncio -async def test_code_interpreter(): +async def test_code_interpreter() -> None: """Test running a Python command in the interpreter.""" session = CodeInterpreter(timeout_seconds=15) @@ -42,7 +38,7 @@ async def test_code_interpreter(): @pytest.mark.asyncio -async def test_jupyter_command(): +async def test_jupyter_command() -> None: """Test running a Python command in the interpreter.""" session = CodeInterpreter(timeout_seconds=15) @@ -53,9 +49,9 @@ async def test_jupyter_command(): @pytest.mark.asyncio -async def test_code_interpreter_upload_file(): +async def test_code_interpreter_upload_file() -> None: """Test running a Python command in the interpreter.""" - example_paths = [Path("tests/tool_tests/example_files/example_a.csv")] + example_paths = [Path("aieng-agents-utils/tests/example_files/example_a.csv")] for _path in example_paths: assert _path.exists() diff --git a/tests/tool_tests/test_gemini_grounding.py b/aieng-agents-utils/tests/tools/test_gemini_grounding.py similarity index 77% rename from tests/tool_tests/test_gemini_grounding.py rename to aieng-agents-utils/tests/tools/test_gemini_grounding.py index 2f9a713..9b44d0b 100644 --- a/tests/tool_tests/test_gemini_grounding.py +++ 
b/aieng-agents-utils/tests/tools/test_gemini_grounding.py @@ -3,13 +3,12 @@ import os import pytest - -from src.utils import pretty_print -from src.utils.tools.gemini_grounding import GeminiGroundingWithGoogleSearch +from aieng.agents import pretty_print +from aieng.agents.tools.gemini_grounding import GeminiGroundingWithGoogleSearch @pytest.mark.asyncio -async def test_web_search_with_gemini_grounding(): +async def test_web_search_with_gemini_grounding() -> None: """Test Gemini grounding with Google Search integration.""" # Check if the environment variable is set assert os.getenv("WEB_SEARCH_BASE_URL") diff --git a/tests/tool_tests/test_get_news_events.py b/aieng-agents-utils/tests/tools/test_get_news_events.py similarity index 85% rename from tests/tool_tests/test_get_news_events.py rename to aieng-agents-utils/tests/tools/test_get_news_events.py index 4d71b31..8986de7 100644 --- a/tests/tool_tests/test_get_news_events.py +++ b/aieng-agents-utils/tests/tools/test_get_news_events.py @@ -1,12 +1,11 @@ """Test the tool for getting news events.""" import pytest - -from src.utils.tools import get_news_events +from aieng.agents.tools import get_news_events @pytest.mark.asyncio -async def test_get_news_events(): +async def test_get_news_events() -> None: """Test tool for retrieving news events from enwiki.""" events_by_category = await get_news_events() all_events = [item for items in events_by_category.root.values() for item in items] diff --git a/tests/tool_tests/test_weaviate.py b/aieng-agents-utils/tests/tools/test_weaviate.py similarity index 79% rename from tests/tool_tests/test_weaviate.py rename to aieng-agents-utils/tests/tools/test_weaviate.py index 474a93b..efdd363 100644 --- a/tests/tool_tests/test_weaviate.py +++ b/aieng-agents-utils/tests/tools/test_weaviate.py @@ -1,28 +1,28 @@ """Test cases for Weaviate integration.""" +from typing import Any, AsyncGenerator + import pytest import pytest_asyncio -from dotenv import load_dotenv - -from src.utils import ( 
+from aieng.agents import Configs, pretty_print +from aieng.agents.tools.weaviate_kb import ( AsyncWeaviateKnowledgeBase, - Configs, get_weaviate_async_client, - pretty_print, ) +from dotenv import load_dotenv load_dotenv(verbose=True) @pytest.fixture() -def configs(): +def configs() -> Any: """Load env var configs for testing.""" return Configs() @pytest_asyncio.fixture() -async def weaviate_kb(configs): +async def weaviate_kb(configs) -> AsyncGenerator[Any, Any]: """Weaviate knowledgebase for testing.""" async_client = get_weaviate_async_client(configs) @@ -34,7 +34,7 @@ async def weaviate_kb(configs): @pytest.mark.asyncio -async def test_weaviate_kb(weaviate_kb: AsyncWeaviateKnowledgeBase): +async def test_weaviate_kb(weaviate_kb: AsyncWeaviateKnowledgeBase) -> None: """Test weaviate knowledgebase integration.""" responses = await weaviate_kb.search_knowledgebase("What is Toronto known for?") assert len(responses) > 0 diff --git a/tests/test_web_search_auth.py b/aieng-agents-utils/tests/web_search/test_web_search_auth.py similarity index 99% rename from tests/test_web_search_auth.py rename to aieng-agents-utils/tests/web_search/test_web_search_auth.py index 7afb4b4..088c61e 100644 --- a/tests/test_web_search_auth.py +++ b/aieng-agents-utils/tests/web_search/test_web_search_auth.py @@ -5,9 +5,8 @@ from typing import Optional import pytest - -from src.utils.web_search import auth -from src.utils.web_search.db import ( +from aieng.agents.web_search import auth +from aieng.agents.web_search.db import ( APIKeyNotFoundError, APIKeyRecord, Status, diff --git a/pyproject.toml b/pyproject.toml index 29671b9..bf8184b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,28 +1,16 @@ [project] -name = "agent-bootcamp-202507" +name = "agent-bootcamp" version = "0.1.0" -description = "Vector Institute Agent Bootcamp 202507" +description = "Vector Institute Agent Bootcamp" readme = "README.md" authors = [ {name = "Vector AI Engineering", email = 
"ai_engineering@vectorinstitute.ai"}] -license = "Apache-2.0" +license = "MIT" requires-python = ">=3.12" dependencies = [ - "aiohttp>=3.12.14", - "beautifulsoup4>=4.13.4", - "datasets>=4.4.0", - "e2b-code-interpreter>=2.3.0", - "gradio>=6.1.0", - "langfuse>=3.9.0", - "lxml>=6.0.0", - "nest-asyncio>=1.6.0", + "aieng-agents-utils>=0.1.0", "numpy<2.3.0", - "openai>=2.6.0", - "openai-agents>=0.4.0", "plotly>=6.2.0", - "pydantic>=2.11.7", - "pydantic-ai-slim[logfire]>=0.3.7", "scikit-learn>=1.7.0", - "weaviate-client>=4.15.4", ] [build-system] @@ -44,13 +32,11 @@ dev = [ "nbqa>=1.9.1", "pip-audit>=2.7.3", "pre-commit>=4.1.0", - "pymupdf>=1.26.7", "pytest>=8.3.4", "pytest-asyncio>=1.2.0", "pytest-cov>=7.0.0", "pytest-mock>=3.14.0", "ruff>=0.12.2", - "transformers>=4.54.1", ] docs = [ "jinja2>=3.1.6", # Pinning version to address vulnerability GHSA-cpwx-vrp4-4pq7 @@ -61,17 +47,19 @@ docs = [ "ipykernel>=6.29.5", "ipython>=9.4.0", ] -web-search = [ - "google-cloud-firestore>=2.21.0", - "fastapi[standard]>=0.116.1", - "google-genai>=1.46.0", - "simplejson>=3.20.2", -] # Default dependency groups to be installed [tool.uv] default-groups = ["dev", "docs"] +[tool.uv.workspace] +members = [ + "aieng-agents-utils", +] + +[tool.uv.sources] +aieng-agents-utils = { workspace = true } + [tool.ruff] include = ["*.py", "pyproject.toml", "*.ipynb"] line-length = 88 @@ -119,8 +107,6 @@ ignore = [ [tool.ruff.lint.per-file-ignores] "__init__.py" = ["E402", "F401", "F403", "F811", "D104"] - - [tool.ruff.lint.pep8-naming] ignore-names = ["X*", "setUp"] @@ -140,5 +126,5 @@ markers = [ [tool.coverage] [tool.coverage.run] - source=["aieng_template"] - omit=["tests/*", "*__init__.py"] + source=["aieng-agents-utils/aieng"] + omit=["aieng-agents-utils/aieng/tests/*", "tests/*", "*__init__.py"] diff --git a/src/1_basics/0_search_demo/app.py b/src/1_basics/0_search_demo/app.py index 4c29510..fc4b5c0 100644 --- a/src/1_basics/0_search_demo/app.py +++ b/src/1_basics/0_search_demo/app.py @@ -3,10 
+3,9 @@ import asyncio import gradio as gr +from aieng.agents import AsyncClientManager, pretty_print from dotenv import load_dotenv -from src.utils import AsyncClientManager, pretty_print - DESCRIPTION = """\ In the example below, your goal is to find out where \ diff --git a/src/1_basics/1_react_rag/app.py b/src/1_basics/1_react_rag/app.py index 354ca46..04d1fcd 100644 --- a/src/1_basics/1_react_rag/app.py +++ b/src/1_basics/1_react_rag/app.py @@ -8,12 +8,11 @@ from typing import TYPE_CHECKING, Any, AsyncGenerator import gradio as gr +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.prompts import REACT_INSTRUCTIONS from dotenv import load_dotenv from gradio.components.chatbot import ChatMessage -from src.prompts import REACT_INSTRUCTIONS -from src.utils.client_manager import AsyncClientManager - if TYPE_CHECKING: from openai.types.chat import ( diff --git a/src/1_basics/1_react_rag/cli.py b/src/1_basics/1_react_rag/cli.py index 007e466..f6ff6ef 100644 --- a/src/1_basics/1_react_rag/cli.py +++ b/src/1_basics/1_react_rag/cli.py @@ -4,14 +4,11 @@ import json from typing import TYPE_CHECKING +from aieng.agents import pretty_print +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.prompts import REACT_INSTRUCTIONS from dotenv import load_dotenv -from src.prompts import REACT_INSTRUCTIONS -from src.utils import ( - AsyncClientManager, - pretty_print, -) - if TYPE_CHECKING: from openai.types.chat import ChatCompletionToolParam diff --git a/src/2_frameworks/1_react_rag/app.py b/src/2_frameworks/1_react_rag/app.py index 744a99d..37fde76 100644 --- a/src/2_frameworks/1_react_rag/app.py +++ b/src/2_frameworks/1_react_rag/app.py @@ -6,15 +6,16 @@ import agents import gradio as gr +from aieng.agents import ( + get_or_create_agent_session, + oai_agent_stream_to_gradio_messages, +) +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.gradio import COMMON_GRADIO_CONFIG +from aieng.agents.prompts 
import REACT_INSTRUCTIONS from dotenv import load_dotenv from gradio.components.chatbot import ChatMessage -from src.prompts import REACT_INSTRUCTIONS -from src.utils import oai_agent_stream_to_gradio_messages -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG - async def _main( query: str, history: list[ChatMessage], session_state: dict[str, Any] @@ -26,7 +27,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) # Define an agent using the OpenAI Agent SDK main_agent = agents.Agent( diff --git a/src/2_frameworks/1_react_rag/cli.py b/src/2_frameworks/1_react_rag/cli.py index f755b7d..c9e6e62 100644 --- a/src/2_frameworks/1_react_rag/cli.py +++ b/src/2_frameworks/1_react_rag/cli.py @@ -10,46 +10,47 @@ Runner, function_tool, ) +from aieng.agents import pretty_print +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.prompts import REACT_INSTRUCTIONS from dotenv import load_dotenv -from src.prompts import REACT_INSTRUCTIONS -from src.utils import pretty_print -from src.utils.client_manager import AsyncClientManager - async def _main(query: str) -> None: - wikipedia_agent = Agent( - name="Wikipedia Agent", - instructions=REACT_INSTRUCTIONS, - tools=[function_tool(client_manager.knowledgebase.search_knowledgebase)], - model=OpenAIChatCompletionsModel( - model=client_manager.configs.default_worker_model, - openai_client=client_manager.openai_client, - ), - ) - - response = await Runner.run( - wikipedia_agent, - input=query, - run_config=no_tracing_config, - ) - - for item in response.new_items: - pretty_print(item.raw_item) - print() - - pretty_print(response.final_output) - - # 
Uncomment the following for a basic "streaming" example
-
-    # from src.utils import oai_agent_stream_to_gradio_messages
-    # result_stream = Runner.run_streamed(
-    #     wikipedia_agent, input=query, run_config=no_tracing_config
-    # )
-    # async for event in result_stream.stream_events():
-    #     event_parsed = oai_agent_stream_to_gradio_messages(event)
-    #     if len(event_parsed) > 0:
-    #         pretty_print(event_parsed)
+    try:
+        wikipedia_agent = Agent(
+            name="Wikipedia Agent",
+            instructions=REACT_INSTRUCTIONS,
+            tools=[function_tool(client_manager.knowledgebase.search_knowledgebase)],
+            model=OpenAIChatCompletionsModel(
+                model=client_manager.configs.default_worker_model,
+                openai_client=client_manager.openai_client,
+            ),
+        )
+
+        response = await Runner.run(
+            wikipedia_agent, input=query, run_config=no_tracing_config
+        )
+
+        for item in response.new_items:
+            pretty_print(item.raw_item)
+            print()
+
+        pretty_print(response.final_output)
+
+        # Uncomment the following for a basic "streaming" example
+
+        # from aieng.agents import oai_agent_stream_to_gradio_messages
+        # result_stream = Runner.run_streamed(
+        #     wikipedia_agent, input=query, run_config=no_tracing_config
+        # )
+        # async for event in result_stream.stream_events():
+        #     event_parsed = oai_agent_stream_to_gradio_messages(event)
+        #     if len(event_parsed) > 0:
+        #         pretty_print(event_parsed)
+    finally:
+        # Ensure clients are closed on exit
+        await client_manager.close()
 
 
 if __name__ == "__main__":
diff --git a/src/2_frameworks/1_react_rag/langfuse_gradio.py b/src/2_frameworks/1_react_rag/langfuse_gradio.py
index 1acae71..85b3b72 100644
--- a/src/2_frameworks/1_react_rag/langfuse_gradio.py
+++ b/src/2_frameworks/1_react_rag/langfuse_gradio.py
@@ -8,21 +8,19 @@
 import agents
 import gradio as gr
-from dotenv import load_dotenv
-from gradio.components.chatbot import ChatMessage
-from langfuse import propagate_attributes
-
-from src.prompts import REACT_INSTRUCTIONS
-from src.utils import (
+from aieng.agents import (
+    
get_or_create_agent_session, oai_agent_stream_to_gradio_messages, pretty_print, set_up_logging, - setup_langfuse_tracer, ) -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG -from src.utils.langfuse.shared_client import langfuse_client +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.gradio import COMMON_GRADIO_CONFIG +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from aieng.agents.prompts import REACT_INSTRUCTIONS +from dotenv import load_dotenv +from gradio.components.chatbot import ChatMessage +from langfuse import propagate_attributes async def _main( @@ -35,7 +33,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) # Define an agent using the OpenAI Agent SDK main_agent = agents.Agent( diff --git a/src/2_frameworks/2_multi_agent/efficient.py b/src/2_frameworks/2_multi_agent/efficient.py index e1ec98d..9c8caea 100644 --- a/src/2_frameworks/2_multi_agent/efficient.py +++ b/src/2_frameworks/2_multi_agent/efficient.py @@ -11,20 +11,18 @@ import agents import gradio as gr -from dotenv import load_dotenv -from gradio.components.chatbot import ChatMessage -from langfuse import propagate_attributes - -from src.prompts import REACT_INSTRUCTIONS -from src.utils import ( +from aieng.agents import ( + get_or_create_agent_session, oai_agent_stream_to_gradio_messages, set_up_logging, - setup_langfuse_tracer, ) -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG -from src.utils.langfuse.shared_client import langfuse_client +from 
aieng.agents.client_manager import AsyncClientManager +from aieng.agents.gradio import COMMON_GRADIO_CONFIG +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from aieng.agents.prompts import REACT_INSTRUCTIONS, SEARCH_AGENT_INSTRUCTIONS +from dotenv import load_dotenv +from gradio.components.chatbot import ChatMessage +from langfuse import propagate_attributes async def _main( @@ -37,7 +35,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) # Use the main agent as the entry point- not the worker agent. with ( @@ -87,15 +85,7 @@ async def _main( # Worker Agent: handles long context efficiently search_agent = agents.Agent( name="SearchAgent", - instructions=( - "You are a search agent. You receive a single search query as input. " - "Use the search tool to perform a search, then produce a concise " - "'search summary' of the key findings. " - "For every fact you include in the summary, ALWAYS include a citation " - "both in-line and at the end of the summary as a numbered list. The " - "citation at the end should include relevant metadata from the search " - "results. Do NOT return raw search results. 
" - ), + instructions=SEARCH_AGENT_INSTRUCTIONS, tools=[ agents.function_tool(client_manager.knowledgebase.search_knowledgebase), ], diff --git a/src/2_frameworks/2_multi_agent/efficient_multiple_kbs.py b/src/2_frameworks/2_multi_agent/efficient_multiple_kbs.py index 1f3444f..8107b65 100644 --- a/src/2_frameworks/2_multi_agent/efficient_multiple_kbs.py +++ b/src/2_frameworks/2_multi_agent/efficient_multiple_kbs.py @@ -5,23 +5,22 @@ import agents import gradio as gr -from dotenv import load_dotenv -from gradio.components.chatbot import ChatMessage -from langfuse import propagate_attributes - -from src.utils import ( +from aieng.agents import ( + get_or_create_agent_session, oai_agent_stream_to_gradio_messages, set_up_logging, - setup_langfuse_tracer, ) -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG -from src.utils.langfuse.shared_client import langfuse_client -from src.utils.tools.gemini_grounding import ( +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.gradio import COMMON_GRADIO_CONFIG +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from aieng.agents.prompts import WIKI_AND_WEB_ORCHESTRATOR_INSTRUCTIONS +from aieng.agents.tools.gemini_grounding import ( GeminiGroundingWithGoogleSearch, ModelSettings, ) +from dotenv import load_dotenv +from gradio.components.chatbot import ChatMessage +from langfuse import propagate_attributes async def _main( @@ -34,7 +33,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) # Use the main agent as the entry point- not the worker agent. 
with ( @@ -112,53 +111,7 @@ async def _main( # Main Agent: more expensive and slower, but better at complex planning main_agent = agents.Agent( name="MainAgent", - instructions=""" - You are a deep research agent and your goal is to conduct in-depth, multi-turn - research by breaking down complex queries, using the provided tools, and - synthesizing the information into a comprehensive report. - - You have access to the following tools: - 1. 'search_knowledgebase' - use this tool to search for information in a - knowledge base. The knowledge base reflects a subset of Wikipedia as - of May 2025. - 2. 'get_web_search_grounded_response' - use this tool for current events, - news, fact-checking or when the information in the knowledge base is - not sufficient to answer the question. - - Both tools will not return raw search results or the sources themselves. - Instead, they will return a concise summary of the key findings, along - with the sources used to generate the summary. - - For best performance, divide complex queries into simpler sub-queries - Before calling either tool, always explain your reasoning for doing so. - - Note that the 'get_web_search_grounded_response' tool will expand the query - into multiple search queries and execute them. It will also return the - queries it executed. Do not repeat them. - - **Routing Guidelines:** - - When answering a question, you should first try to use the 'search_knowledgebase' - tool, unless the question requires recent information after May 2025 or - has explicit recency cues. - - If either tool returns insufficient information for a given query, try - reformulating or using the other tool. You can call either tool multiple - times to get the information you need to answer the user's question. - - **Guidelines for synthesis** - - After collecting results, write the final answer from your own synthesis. - - Add a "Sources" section listing unique sources, formatted as: - [1] Publisher - URL - [2] Wikipedia: (Section:
) - Order by first mention in your text. Every factual sentence in your final - response must map to at least one source. - - If web and knowledge base disagree, surface the disagreement and prefer sources - with newer publication dates. - - Do not invent URLs or sources. - - If both tools fail, say so and suggest 2–3 refined queries. - - Be sure to mention the sources in your response, including the URL if available, - and do not make up information. - """, + instructions=WIKI_AND_WEB_ORCHESTRATOR_INSTRUCTIONS, # Allow the planner agent to invoke the worker agent. # The long context provided to the worker agent is hidden from the main agent. tools=[ diff --git a/src/2_frameworks/2_multi_agent/fan_out.py b/src/2_frameworks/2_multi_agent/fan_out.py index b740013..3cd04d6 100644 --- a/src/2_frameworks/2_multi_agent/fan_out.py +++ b/src/2_frameworks/2_multi_agent/fan_out.py @@ -26,11 +26,9 @@ import datasets import openai import pydantic - -from src.utils import set_up_logging, setup_langfuse_tracer -from src.utils.async_utils import gather_with_progress, rate_limited -from src.utils.client_manager import AsyncClientManager -from src.utils.langfuse.shared_client import langfuse_client +from aieng.agents import gather_with_progress, rate_limited, set_up_logging +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer MAX_CONCURRENCY = {"worker": 50, "reviewer": 50} diff --git a/src/2_frameworks/2_multi_agent/verbose.py b/src/2_frameworks/2_multi_agent/verbose.py index 29d426d..4418109 100644 --- a/src/2_frameworks/2_multi_agent/verbose.py +++ b/src/2_frameworks/2_multi_agent/verbose.py @@ -12,58 +12,25 @@ import agents import gradio as gr +from aieng.agents import ( + get_or_create_agent_session, + oai_agent_items_to_gradio_messages, + pretty_print, + set_up_logging, +) +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.gradio import COMMON_GRADIO_CONFIG +from 
aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from aieng.agents.prompts import ( + KB_RESEARCHER_INSTRUCTIONS, + WIKI_SEARCH_PLANNER_INSTRUCTIONS, + WRITER_INSTRUCTIONS, +) from dotenv import load_dotenv from gradio.components.chatbot import ChatMessage from langfuse import propagate_attributes from pydantic import BaseModel -from src.utils import ( - oai_agent_items_to_gradio_messages, - pretty_print, - setup_langfuse_tracer, -) -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG -from src.utils.langfuse.shared_client import langfuse_client -from src.utils.logging import set_up_logging - - -PLANNER_INSTRUCTIONS = """\ -You are a research planner. \ -Given a user's query, produce a list of search terms that can be used to retrieve -relevant information from a knowledge base to answer the question. \ -As you are not able to clarify from the user what they are looking for, \ -your search terms should be broad and cover various aspects of the query. \ -Output up to 10 search terms to query the knowledge base. \ -Note that the knowledge base is a Wikipedia dump and cuts off at May 2025. -""" - -RESEARCHER_INSTRUCTIONS = """\ -You are a research assistant with access to a knowledge base. \ -Given a potentially broad search term, your task is to use the search tool to \ -retrieve relevant information from the knowledge base and produce a short \ -summary of at most 300 words. You must pass the initial search term directly to \ -the search tool without any modifications and, only if necessary, refine your \ -search based on the results you get back. Your summary must be based solely on \ -a synthesis of all the search results and should not include any information that \ -is not present in the search results. 
For every fact you include in the summary, \ -ALWAYS include a citation both in-line and at the end of the summary as a numbered \ -list. The citation at the end should include relevant metadata from the search \ -results. Do NOT return raw search results. -""" - -WRITER_INSTRUCTIONS = """\ -You are an expert at synthesizing information and writing coherent reports. \ -Given a user's query and a set of search summaries, synthesize these into a \ -coherent report that answers the user's question. The length of the report should be \ -proportional to the complexity of the question. For queries that are more complex, \ -ensure that the report is well-structured, with clear sections and headings where \ -appropriate. Make sure to use the citations from the search summaries to back up \ -any factual claims you make. \ -Do not make up any information outside of the search summaries. -""" - class SearchItem(BaseModel): """A single search item in the search plan.""" @@ -139,7 +106,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) with ( langfuse_client.start_as_current_observation( @@ -242,7 +209,7 @@ async def _main( planner_agent = agents.Agent( name="Planner Agent", - instructions=PLANNER_INSTRUCTIONS, + instructions=WIKI_SEARCH_PLANNER_INSTRUCTIONS, model=agents.OpenAIChatCompletionsModel( model=planner_model, openai_client=client_manager.openai_client, @@ -252,7 +219,7 @@ async def _main( research_agent = agents.Agent( name="Research Agent", - instructions=RESEARCHER_INSTRUCTIONS, + instructions=KB_RESEARCHER_INSTRUCTIONS, tools=[agents.function_tool(client_manager.knowledgebase.search_knowledgebase)], model=agents.OpenAIChatCompletionsModel( model=worker_model, diff --git a/src/2_frameworks/3_code_interpreter/app.py 
b/src/2_frameworks/3_code_interpreter/app.py index 893b0a7..efa0971 100644 --- a/src/2_frameworks/3_code_interpreter/app.py +++ b/src/2_frameworks/3_code_interpreter/app.py @@ -11,40 +11,20 @@ import agents import gradio as gr -from dotenv import load_dotenv -from gradio.components.chatbot import ChatMessage -from langfuse import propagate_attributes - -from src.utils import ( - CodeInterpreter, +from aieng.agents import ( + get_or_create_agent_session, oai_agent_stream_to_gradio_messages, + pretty_print, set_up_logging, ) -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG -from src.utils.langfuse.oai_sdk_setup import setup_langfuse_tracer -from src.utils.langfuse.shared_client import langfuse_client -from src.utils.pretty_printing import pretty_print - - -CODE_INTERPRETER_INSTRUCTIONS = """\ -The `code_interpreter` tool executes Python commands. \ -Please note that data is not persisted. Each time you invoke this tool, \ -you will need to run import and define all variables from scratch. - -You can access the local filesystem using this tool. \ -Instead of asking the user for file inputs, you should try to find the file \ -using this tool. - -Recommended packages: Pandas, Numpy, SymPy, Scikit-learn, Matplotlib, Seaborn. - -Use Matplotlib to create visualizations. Make sure to call `plt.show()` so that -the plot is captured and returned to the user. - -You can also run Jupyter-style shell commands (e.g., `!pip freeze`) -but you won't be able to install packages. 
-""" +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.gradio import COMMON_GRADIO_CONFIG +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from aieng.agents.prompts import CODE_INTERPRETER_INSTRUCTIONS +from aieng.agents.tools import CodeInterpreter +from dotenv import load_dotenv +from gradio.components.chatbot import ChatMessage +from langfuse import propagate_attributes async def _main( @@ -57,7 +37,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) with ( langfuse_client.start_as_current_observation( @@ -102,7 +82,7 @@ async def _main( code_interpreter = CodeInterpreter( local_files=[ Path("sandbox_content/"), - Path("tests/tool_tests/example_files/example_a.csv"), + Path("aieng-agents-utils/tests/example_files/example_a.csv"), ] ) diff --git a/src/2_frameworks/4_mcp/app.py b/src/2_frameworks/4_mcp/app.py index d3e71b9..599f7d6 100644 --- a/src/2_frameworks/4_mcp/app.py +++ b/src/2_frameworks/4_mcp/app.py @@ -10,20 +10,18 @@ import agents import gradio as gr from agents.mcp import MCPServerStdio, create_static_tool_filter -from dotenv import load_dotenv -from gradio.components.chatbot import ChatMessage -from langfuse import propagate_attributes - -from src.utils import ( +from aieng.agents import ( + get_or_create_agent_session, oai_agent_stream_to_gradio_messages, pretty_print, set_up_logging, ) -from src.utils.agent_session import get_or_create_session -from src.utils.client_manager import AsyncClientManager -from src.utils.gradio import COMMON_GRADIO_CONFIG -from src.utils.langfuse.oai_sdk_setup import setup_langfuse_tracer -from src.utils.langfuse.shared_client import langfuse_client +from aieng.agents.client_manager import AsyncClientManager +from 
aieng.agents.gradio import COMMON_GRADIO_CONFIG +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from dotenv import load_dotenv +from gradio.components.chatbot import ChatMessage +from langfuse import propagate_attributes async def _main( @@ -37,7 +35,7 @@ async def _main( # conversation history across multiple turns of a chat # This makes it possible to ask follow-up questions that refer to # previous turns in the conversation - session = get_or_create_session(history, session_state) + session = get_or_create_agent_session(history, session_state) # Get the absolute path to the current git repository, regardless of where # the script is run from diff --git a/src/3_evals/1_llm_judge/run_eval.py b/src/3_evals/1_llm_judge/run_eval.py index a6beb0b..32bb959 100644 --- a/src/3_evals/1_llm_judge/run_eval.py +++ b/src/3_evals/1_llm_judge/run_eval.py @@ -5,18 +5,13 @@ import agents import pydantic +from aieng.agents import gather_with_progress, set_up_logging, setup_langfuse_tracer +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.langfuse import flush_langfuse, langfuse_client from dotenv import load_dotenv from langfuse._client.datasets import DatasetItemClient from rich.progress import track -from src.utils import ( - gather_with_progress, - set_up_logging, - setup_langfuse_tracer, -) -from src.utils.client_manager import AsyncClientManager -from src.utils.langfuse.shared_client import flush_langfuse, langfuse_client - SYSTEM_MESSAGE = """\ Answer the question using the search tool. 
\ diff --git a/src/3_evals/1_llm_judge/upload_data.py b/src/3_evals/1_llm_judge/upload_data.py index b6ecbf7..9eb5ffb 100644 --- a/src/3_evals/1_llm_judge/upload_data.py +++ b/src/3_evals/1_llm_judge/upload_data.py @@ -6,14 +6,12 @@ import argparse +from aieng.agents import Configs +from aieng.agents.data import get_dataset, get_dataset_url_hash +from aieng.agents.langfuse import langfuse_client, set_up_langfuse_otlp_env_vars from dotenv import load_dotenv from rich.progress import track -from src.utils.data import get_dataset, get_dataset_url_hash -from src.utils.env_vars import Configs -from src.utils.langfuse.otlp_env_setup import set_up_langfuse_otlp_env_vars -from src.utils.langfuse.shared_client import langfuse_client - if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/src/3_evals/2_synthetic_data/README.md b/src/3_evals/2_synthetic_data/README.md index 99a1b4e..ed682f4 100644 --- a/src/3_evals/2_synthetic_data/README.md +++ b/src/3_evals/2_synthetic_data/README.md @@ -1,7 +1,7 @@ # Generate synthetic data using Agent Pipeline ```bash -uv run -m src.3_evals.2_synthetic_data.synthesize_data \ +uv run --env-file .env -m src.3_evals.2_synthetic_data.synthesize_data \ --source_dataset hf://vector-institute/hotpotqa@d997ecf:train \ --langfuse_dataset_name search-dataset-synthetic-20250609 \ --limit 18 diff --git a/src/3_evals/2_synthetic_data/annotate_diversity.py b/src/3_evals/2_synthetic_data/annotate_diversity.py index 2e9ed3d..f6a73a5 100644 --- a/src/3_evals/2_synthetic_data/annotate_diversity.py +++ b/src/3_evals/2_synthetic_data/annotate_diversity.py @@ -17,12 +17,12 @@ import numpy as np import pandas as pd import pydantic +from aieng.agents import Configs, gather_with_progress +from aieng.agents.data import create_batches +from aieng.agents.langfuse import flush_langfuse, langfuse_client from openai import AsyncOpenAI from rich.progress import track -from src.utils import Configs, create_batches, gather_with_progress -from 
src.utils.langfuse.shared_client import flush_langfuse, langfuse_client - if TYPE_CHECKING: from langfuse._client.datasets import DatasetItemClient diff --git a/src/3_evals/2_synthetic_data/gradio_visualize_diversity.py b/src/3_evals/2_synthetic_data/gradio_visualize_diversity.py index 5f94a81..a17ac07 100644 --- a/src/3_evals/2_synthetic_data/gradio_visualize_diversity.py +++ b/src/3_evals/2_synthetic_data/gradio_visualize_diversity.py @@ -12,14 +12,14 @@ import gradio as gr import numpy as np import plotly.express as px +from aieng.agents import Configs, gather_with_progress +from aieng.agents.data import create_batches +from aieng.agents.langfuse import langfuse_client from openai import AsyncOpenAI from plotly.graph_objs import Figure from sklearn.decomposition import PCA from sklearn.manifold import TSNE -from src.utils import Configs, create_batches, gather_with_progress -from src.utils.langfuse.shared_client import langfuse_client - def reduce_dimensions( embeddings: np.ndarray, method: str = "tsne", n_components: int = 2 diff --git a/src/3_evals/2_synthetic_data/synthesize_data.py b/src/3_evals/2_synthetic_data/synthesize_data.py index b5ad73f..09c4cc3 100644 --- a/src/3_evals/2_synthetic_data/synthesize_data.py +++ b/src/3_evals/2_synthetic_data/synthesize_data.py @@ -13,20 +13,19 @@ import agents import pydantic -from dotenv import load_dotenv -from rich.progress import track - -from src.utils import ( +from aieng.agents import ( gather_with_progress, pretty_print, rate_limited, set_up_logging, setup_langfuse_tracer, ) -from src.utils.client_manager import AsyncClientManager -from src.utils.data import get_dataset, get_dataset_url_hash -from src.utils.langfuse.shared_client import langfuse_client -from src.utils.tools.news_events import NewsEvent, get_news_events +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.data import get_dataset, get_dataset_url_hash +from aieng.agents.langfuse import langfuse_client +from 
aieng.agents.tools import NewsEvent, get_news_events +from dotenv import load_dotenv +from rich.progress import track SYSTEM_MESSAGE = """\ diff --git a/src/3_evals/2_synthetic_data/synthesize_data_e2b.py b/src/3_evals/2_synthetic_data/synthesize_data_e2b.py index 1f1d5dd..2230ead 100644 --- a/src/3_evals/2_synthetic_data/synthesize_data_e2b.py +++ b/src/3_evals/2_synthetic_data/synthesize_data_e2b.py @@ -24,20 +24,18 @@ import agents import pydantic -from dotenv import load_dotenv -from rich.progress import track - -from src.utils import ( - CodeInterpreter, +from aieng.agents import ( gather_with_progress, pretty_print, rate_limited, set_up_logging, - setup_langfuse_tracer, ) -from src.utils.client_manager import AsyncClientManager -from src.utils.data import get_dataset_url_hash -from src.utils.langfuse.shared_client import langfuse_client +from aieng.agents.client_manager import AsyncClientManager +from aieng.agents.data import get_dataset_url_hash +from aieng.agents.langfuse import langfuse_client, setup_langfuse_tracer +from aieng.agents.tools import CodeInterpreter +from dotenv import load_dotenv +from rich.progress import track SYSTEM_MESSAGE = """\ @@ -129,7 +127,7 @@ async def generate_synthetic_test_cases( template_name=client_manager.configs.default_code_interpreter_template, local_files=[ Path("sandbox_content/"), - Path("tests/tool_tests/example_files/example_a.csv"), + Path("aieng-agents-utils/tests/example_files/example_a.csv"), ], ) diff --git a/src/prompts.py b/src/prompts.py deleted file mode 100644 index 7dc95d4..0000000 --- a/src/prompts.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Centralized location for all system prompts.""" - -REACT_INSTRUCTIONS = """\ -Answer the question using the search tool. \ -EACH TIME before invoking the function, you must explain your reasons for doing so. \ -Be sure to mention the sources in your response. \ -If the search tool did not return intended results, try again. 
\ -For best performance, divide complex queries into simpler sub-queries. \ -Do not make up information. \ -For facts that might change over time, you must use the search tool to retrieve the \ -most up-to-date information. -""" diff --git a/src/utils/__init__.py b/src/utils/__init__.py deleted file mode 100644 index 3e54184..0000000 --- a/src/utils/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Shared toolings for reference implementations.""" - -from .async_utils import gather_with_progress, rate_limited -from .client_manager import AsyncClientManager -from .data.batching import create_batches -from .env_vars import Configs -from .gradio.messages import ( - gradio_messages_to_oai_chat, - oai_agent_items_to_gradio_messages, - oai_agent_stream_to_gradio_messages, -) -from .langfuse.oai_sdk_setup import setup_langfuse_tracer -from .logging import set_up_logging -from .pretty_printing import pretty_print -from .tools.code_interpreter import CodeInterpreter -from .tools.kb_weaviate import AsyncWeaviateKnowledgeBase, get_weaviate_async_client -from .trees import tree_filter diff --git a/src/utils/data/__init__.py b/src/utils/data/__init__.py deleted file mode 100644 index 99eee29..0000000 --- a/src/utils/data/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .load_dataset import get_dataset, get_dataset_url_hash - - -__all__ = ["get_dataset", "get_dataset_url_hash"] diff --git a/src/utils/langfuse/trace_id.py b/src/utils/langfuse/trace_id.py deleted file mode 100644 index e1b93fa..0000000 --- a/src/utils/langfuse/trace_id.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Obtain trace_id, required for linking trace to dataset row. 
- -Full documentation: -langfuse.com/docs/integrations/openaiagentssdk/example-evaluating-openai-agents -running-the-agent-on-the-dataset -""" - - -def get_langfuse_trace_id(): - """Obtain "formatted" trace_id for LangFuse.""" diff --git a/src/utils/tools/__init__.py b/src/utils/tools/__init__.py deleted file mode 100644 index b1514d1..0000000 --- a/src/utils/tools/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .kb_weaviate import AsyncWeaviateKnowledgeBase, get_weaviate_async_client -from .news_events import get_news_events diff --git a/src/utils/trees.py b/src/utils/trees.py deleted file mode 100644 index e5dade0..0000000 --- a/src/utils/trees.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Utils for handling nested dict.""" - -from typing import Any, Callable, TypeVar - - -Tree = TypeVar("Tree", bound=dict) - - -def tree_filter( - data: Tree, - criteria_fn: Callable[[Any], bool] = lambda x: x is not None, -) -> Tree: - """Keep only leaves for which criteria is True. - - Filters out None leaves if criteria is not specified. 
- """ - output: Tree = {} # type: ignore[reportAssignType] - for k, v in data.items(): - if isinstance(v, dict): - output[k] = tree_filter(v, criteria_fn=criteria_fn) - elif criteria_fn(v): - output[k] = v - - return output diff --git a/tests/README.md b/tests/README.md deleted file mode 100644 index 98f737e..0000000 --- a/tests/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Unit tests - -```bash -uv run pytest -sv tests/tool_tests/test_weaviate.py -uv run pytest -sv tests/tool_tests/test_code_interpreter.py -uv run pytest -sv tests/tool_tests/test_integration.py -``` diff --git a/tests/tool_tests/test_integration.py b/tests/tool_tests/test_integration.py index 847c8f3..167556e 100644 --- a/tests/tool_tests/test_integration.py +++ b/tests/tool_tests/test_integration.py @@ -5,18 +5,16 @@ import pytest import pytest_asyncio -from dotenv import load_dotenv -from langfuse import get_client -from openai import AsyncOpenAI - -from src.utils import ( +from aieng.agents import Configs, pretty_print +from aieng.agents.langfuse import set_up_langfuse_otlp_env_vars +from aieng.agents.tools import ( AsyncWeaviateKnowledgeBase, - Configs, + GeminiGroundingWithGoogleSearch, get_weaviate_async_client, - pretty_print, ) -from src.utils.langfuse.otlp_env_setup import set_up_langfuse_otlp_env_vars -from src.utils.tools.gemini_grounding import GeminiGroundingWithGoogleSearch +from dotenv import load_dotenv +from langfuse import get_client +from openai import AsyncOpenAI load_dotenv(verbose=True) diff --git a/uv.lock b/uv.lock index 605b563..a254723 100644 --- a/uv.lock +++ b/uv.lock @@ -7,27 +7,21 @@ resolution-markers = [ "python_full_version < '3.13'", ] +[manifest] +members = [ + "agent-bootcamp", + "aieng-agents-utils", +] + [[package]] -name = "agent-bootcamp-202507" +name = "agent-bootcamp" version = "0.1.0" source = { editable = "." 
} dependencies = [ - { name = "aiohttp" }, - { name = "beautifulsoup4" }, - { name = "datasets" }, - { name = "e2b-code-interpreter" }, - { name = "gradio" }, - { name = "langfuse" }, - { name = "lxml" }, - { name = "nest-asyncio" }, + { name = "aieng-agents-utils" }, { name = "numpy" }, - { name = "openai" }, - { name = "openai-agents" }, { name = "plotly" }, - { name = "pydantic" }, - { name = "pydantic-ai-slim", extra = ["logfire"] }, { name = "scikit-learn" }, - { name = "weaviate-client" }, ] [package.dev-dependencies] @@ -42,13 +36,11 @@ dev = [ { name = "nbqa" }, { name = "pip-audit" }, { name = "pre-commit" }, - { name = "pymupdf" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "pytest-mock" }, { name = "ruff" }, - { name = "transformers" }, ] docs = [ { name = "ipykernel" }, @@ -59,31 +51,13 @@ docs = [ { name = "mkdocstrings" }, { name = "mkdocstrings-python" }, ] -web-search = [ - { name = "fastapi", extra = ["standard"] }, - { name = "google-cloud-firestore" }, - { name = "google-genai" }, - { name = "simplejson" }, -] [package.metadata] requires-dist = [ - { name = "aiohttp", specifier = ">=3.12.14" }, - { name = "beautifulsoup4", specifier = ">=4.13.4" }, - { name = "datasets", specifier = ">=4.4.0" }, - { name = "e2b-code-interpreter", specifier = ">=2.3.0" }, - { name = "gradio", specifier = ">=6.1.0" }, - { name = "langfuse", specifier = ">=3.9.0" }, - { name = "lxml", specifier = ">=6.0.0" }, - { name = "nest-asyncio", specifier = ">=1.6.0" }, + { name = "aieng-agents-utils", editable = "aieng-agents-utils" }, { name = "numpy", specifier = "<2.3.0" }, - { name = "openai", specifier = ">=2.6.0" }, - { name = "openai-agents", specifier = ">=0.4.0" }, { name = "plotly", specifier = ">=6.2.0" }, - { name = "pydantic", specifier = ">=2.11.7" }, - { name = "pydantic-ai-slim", extras = ["logfire"], specifier = ">=0.3.7" }, { name = "scikit-learn", specifier = ">=1.7.0" }, - { name = "weaviate-client", specifier = 
">=4.15.4" }, ] [package.metadata.requires-dev] @@ -98,13 +72,11 @@ dev = [ { name = "nbqa", specifier = ">=1.9.1" }, { name = "pip-audit", specifier = ">=2.7.3" }, { name = "pre-commit", specifier = ">=4.1.0" }, - { name = "pymupdf", specifier = ">=1.26.7" }, { name = "pytest", specifier = ">=8.3.4" }, { name = "pytest-asyncio", specifier = ">=1.2.0" }, { name = "pytest-cov", specifier = ">=7.0.0" }, { name = "pytest-mock", specifier = ">=3.14.0" }, { name = "ruff", specifier = ">=0.12.2" }, - { name = "transformers", specifier = ">=4.54.1" }, ] docs = [ { name = "ipykernel", specifier = ">=6.29.5" }, @@ -115,11 +87,74 @@ docs = [ { name = "mkdocstrings", specifier = ">=0.24.1" }, { name = "mkdocstrings-python", specifier = ">=1.16.12" }, ] -web-search = [ + +[[package]] +name = "aieng-agents-utils" +version = "0.1.0" +source = { editable = "aieng-agents-utils" } +dependencies = [ + { name = "backoff" }, + { name = "beautifulsoup4" }, + { name = "click" }, + { name = "datasets" }, + { name = "e2b-code-interpreter" }, + { name = "fastapi", extra = ["standard"] }, + { name = "google-cloud-firestore" }, + { name = "google-genai" }, + { name = "gradio" }, + { name = "httpx" }, + { name = "langfuse" }, + { name = "lxml" }, + { name = "nest-asyncio" }, + { name = "openai" }, + { name = "openai-agents" }, + { name = "pandas" }, + { name = "pillow" }, + { name = "pydantic" }, + { name = "pydantic-ai-slim", extra = ["logfire"] }, + { name = "pymupdf" }, + { name = "simplejson" }, + { name = "transformers" }, + { name = "weaviate-client" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "backoff", specifier = ">=2.2.1" }, + { name = "beautifulsoup4", specifier = ">=4.13.4" }, + { name = "click", specifier = ">=8.3.0" }, + { name = "datasets", specifier = ">=4.4.0" }, + { name = "e2b-code-interpreter", specifier = ">=2.3.0" }, { name = "fastapi", extras = ["standard"], 
specifier = ">=0.116.1" }, { name = "google-cloud-firestore", specifier = ">=2.21.0" }, { name = "google-genai", specifier = ">=1.46.0" }, + { name = "gradio", specifier = ">=6.1.0" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "langfuse", specifier = ">=3.9.0" }, + { name = "lxml", specifier = ">=6.0.0" }, + { name = "nest-asyncio", specifier = ">=1.6.0" }, + { name = "openai", specifier = ">=2.6.0" }, + { name = "openai-agents", specifier = ">=0.4.0" }, + { name = "pandas", specifier = ">=2.3.3" }, + { name = "pillow", specifier = ">=11.3.0" }, + { name = "pydantic", specifier = ">=2.11.7" }, + { name = "pydantic-ai-slim", extras = ["logfire"], specifier = ">=0.3.7" }, + { name = "pymupdf", specifier = ">=1.26.7" }, { name = "simplejson", specifier = ">=3.20.2" }, + { name = "transformers", specifier = ">=4.54.1" }, + { name = "weaviate-client", specifier = ">=4.15.4" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=8.3.4" }, + { name = "pytest-asyncio", specifier = ">=1.2.0" }, ] [[package]] From 086c3dc0e2737b68628560096093ce6cf6c6ca8f Mon Sep 17 00:00:00 2001 From: fcogidi <41602287+fcogidi@users.noreply.github.com> Date: Mon, 26 Jan 2026 14:02:53 -0500 Subject: [PATCH 2/4] Remove unnecessary getenv assertion from langfuse client setup --- aieng-agents-utils/aieng/agents/langfuse/shared_client.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/aieng-agents-utils/aieng/agents/langfuse/shared_client.py b/aieng-agents-utils/aieng/agents/langfuse/shared_client.py index 7794de1..55e1506 100644 --- a/aieng-agents-utils/aieng/agents/langfuse/shared_client.py +++ b/aieng-agents-utils/aieng/agents/langfuse/shared_client.py @@ -1,7 +1,5 @@ """Shared instance of langfuse client.""" -from os import getenv - from aieng.agents.env_vars import Configs from langfuse import Langfuse from rich.progress import Progress, SpinnerColumn, TextColumn @@ -11,7 +9,6 @@ config = Configs() -assert getenv("LANGFUSE_PUBLIC_KEY") is not 
None langfuse_client = Langfuse( public_key=config.langfuse_public_key, secret_key=config.langfuse_secret_key ) From 42bc2dfdf01cc21827d7b065bc641d16972ff6f9 Mon Sep 17 00:00:00 2001 From: fcogidi <41602287+fcogidi@users.noreply.github.com> Date: Mon, 26 Jan 2026 14:12:00 -0500 Subject: [PATCH 3/4] Fix formatting issues in prompts and update README paths for consistency --- aieng-agents-utils/aieng/agents/prompts.py | 2 +- aieng-agents-utils/aieng/agents/tools/README.md | 2 +- aieng-agents-utils/tests/README.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/aieng-agents-utils/aieng/agents/prompts.py b/aieng-agents-utils/aieng/agents/prompts.py index 44b8258..4c872aa 100644 --- a/aieng-agents-utils/aieng/agents/prompts.py +++ b/aieng-agents-utils/aieng/agents/prompts.py @@ -36,7 +36,7 @@ For every fact you include in the summary, ALWAYS include a citation \ both in-line and at the end of the summary as a numbered list. The \ citation at the end should include relevant metadata from the search \ -results. Do NOT return raw search results. " +results. Do NOT return raw search results." """ WIKI_SEARCH_PLANNER_INSTRUCTIONS = """\ diff --git a/aieng-agents-utils/aieng/agents/tools/README.md b/aieng-agents-utils/aieng/agents/tools/README.md index 1da26ae..d3938b6 100644 --- a/aieng-agents-utils/aieng/agents/tools/README.md +++ b/aieng-agents-utils/aieng/agents/tools/README.md @@ -4,5 +4,5 @@ This module contains various tools for LLM agents. 
```bash # Tool for getting a list of recent news headlines from enwiki -uv run --env-file .env python3 aieng-agent-utils/aieng/tools/news_events.py +uv run --env-file .env python3 aieng-agents-utils/aieng/agents/tools/news_events.py ``` diff --git a/aieng-agents-utils/tests/README.md b/aieng-agents-utils/tests/README.md index 36ecc16..3cc2637 100644 --- a/aieng-agents-utils/tests/README.md +++ b/aieng-agents-utils/tests/README.md @@ -6,5 +6,5 @@ uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_weaviate.p uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_code_interpreter.py uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_gemini_grounding.py uv run --env-file .env pytest -sv aieng-agents-utils/tests/tools/test_get_news_events.py -uv run --env-file .env pytest -sv aieng-agents-utils/tests/web_search_test_web_search_auth.py +uv run --env-file .env pytest -sv aieng-agents-utils/tests/web_search/test_web_search_auth.py ``` From 448f6ae66d0f531a648dbdb2e98c53558bf016d9 Mon Sep 17 00:00:00 2001 From: fcogidi <41602287+fcogidi@users.noreply.github.com> Date: Mon, 26 Jan 2026 14:37:05 -0500 Subject: [PATCH 4/4] Upgrade packages flagged by pip-audit and ignore ones that can't be upgraded --- .github/workflows/code_checks.yml | 1 + uv.lock | 116 +++++++++++++++--------------- 2 files changed, 59 insertions(+), 58 deletions(-) diff --git a/.github/workflows/code_checks.yml b/.github/workflows/code_checks.yml index 9eb62d1..4f00a54 100644 --- a/.github/workflows/code_checks.yml +++ b/.github/workflows/code_checks.yml @@ -57,3 +57,4 @@ jobs: virtual-environment: .venv/ ignore-vulns: | GHSA-xm59-rqc7-hhvf + GHSA-7gcm-g887-7qv7 diff --git a/uv.lock b/uv.lock index a254723..68abf1b 100644 --- a/uv.lock +++ b/uv.lock @@ -3236,55 +3236,55 @@ wheels = [ [[package]] name = "orjson" -version = "3.11.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" }, - { url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" }, - { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" }, - { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" }, - { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" }, - { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" }, - { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" }, - { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = "2025-10-24T15:49:20.545Z" }, - { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" }, - { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, upload-time = "2025-10-24T15:49:23.185Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" }, - { url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" }, - { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" }, - { url = "https://files.pythonhosted.org/packages/23/15/c52aa7112006b0f3d6180386c3a46ae057f932ab3425bc6f6ac50431cca1/orjson-3.11.4-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2d6737d0e616a6e053c8b4acc9eccea6b6cce078533666f32d140e4f85002534", size = 243525, upload-time = "2025-10-24T15:49:29.737Z" }, - { url = "https://files.pythonhosted.org/packages/ec/38/05340734c33b933fd114f161f25a04e651b0c7c33ab95e9416ade5cb44b8/orjson-3.11.4-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:afb14052690aa328cc118a8e09f07c651d301a72e44920b887c519b313d892ff", size = 128871, upload-time = "2025-10-24T15:49:31.109Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/b9/ae8d34899ff0c012039b5a7cb96a389b2476e917733294e498586b45472d/orjson-3.11.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38aa9e65c591febb1b0aed8da4d469eba239d434c218562df179885c94e1a3ad", size = 130055, upload-time = "2025-10-24T15:49:33.382Z" }, - { url = "https://files.pythonhosted.org/packages/33/aa/6346dd5073730451bee3681d901e3c337e7ec17342fb79659ec9794fc023/orjson-3.11.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f2cf4dfaf9163b0728d061bebc1e08631875c51cd30bf47cb9e3293bfbd7dcd5", size = 129061, upload-time = "2025-10-24T15:49:34.935Z" }, - { url = "https://files.pythonhosted.org/packages/39/e4/8eea51598f66a6c853c380979912d17ec510e8e66b280d968602e680b942/orjson-3.11.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89216ff3dfdde0e4070932e126320a1752c9d9a758d6a32ec54b3b9334991a6a", size = 136541, upload-time = "2025-10-24T15:49:36.923Z" }, - { url = "https://files.pythonhosted.org/packages/9a/47/cb8c654fa9adcc60e99580e17c32b9e633290e6239a99efa6b885aba9dbc/orjson-3.11.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9daa26ca8e97fae0ce8aa5d80606ef8f7914e9b129b6b5df9104266f764ce436", size = 137535, upload-time = "2025-10-24T15:49:38.307Z" }, - { url = "https://files.pythonhosted.org/packages/43/92/04b8cc5c2b729f3437ee013ce14a60ab3d3001465d95c184758f19362f23/orjson-3.11.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c8b2769dc31883c44a9cd126560327767f848eb95f99c36c9932f51090bfce9", size = 136703, upload-time = "2025-10-24T15:49:40.795Z" }, - { url = "https://files.pythonhosted.org/packages/aa/fd/d0733fcb9086b8be4ebcfcda2d0312865d17d0d9884378b7cffb29d0763f/orjson-3.11.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1469d254b9884f984026bd9b0fa5bbab477a4bfe558bba6848086f6d43eb5e73", size = 136293, upload-time = "2025-10-24T15:49:42.347Z" }, - { url 
= "https://files.pythonhosted.org/packages/c2/d7/3c5514e806837c210492d72ae30ccf050ce3f940f45bf085bab272699ef4/orjson-3.11.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:68e44722541983614e37117209a194e8c3ad07838ccb3127d96863c95ec7f1e0", size = 140131, upload-time = "2025-10-24T15:49:43.638Z" }, - { url = "https://files.pythonhosted.org/packages/9c/dd/ba9d32a53207babf65bd510ac4d0faaa818bd0df9a9c6f472fe7c254f2e3/orjson-3.11.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:8e7805fda9672c12be2f22ae124dcd7b03928d6c197544fe12174b86553f3196", size = 406164, upload-time = "2025-10-24T15:49:45.498Z" }, - { url = "https://files.pythonhosted.org/packages/8e/f9/f68ad68f4af7c7bde57cd514eaa2c785e500477a8bc8f834838eb696a685/orjson-3.11.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04b69c14615fb4434ab867bf6f38b2d649f6f300af30a6705397e895f7aec67a", size = 149859, upload-time = "2025-10-24T15:49:46.981Z" }, - { url = "https://files.pythonhosted.org/packages/b6/d2/7f847761d0c26818395b3d6b21fb6bc2305d94612a35b0a30eae65a22728/orjson-3.11.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:639c3735b8ae7f970066930e58cf0ed39a852d417c24acd4a25fc0b3da3c39a6", size = 139926, upload-time = "2025-10-24T15:49:48.321Z" }, - { url = "https://files.pythonhosted.org/packages/9f/37/acd14b12dc62db9a0e1d12386271b8661faae270b22492580d5258808975/orjson-3.11.4-cp313-cp313-win32.whl", hash = "sha256:6c13879c0d2964335491463302a6ca5ad98105fc5db3565499dcb80b1b4bd839", size = 136007, upload-time = "2025-10-24T15:49:49.938Z" }, - { url = "https://files.pythonhosted.org/packages/c0/a9/967be009ddf0a1fffd7a67de9c36656b28c763659ef91352acc02cbe364c/orjson-3.11.4-cp313-cp313-win_amd64.whl", hash = "sha256:09bf242a4af98732db9f9a1ec57ca2604848e16f132e3f72edfd3c5c96de009a", size = 131314, upload-time = "2025-10-24T15:49:51.248Z" }, - { url = "https://files.pythonhosted.org/packages/cb/db/399abd6950fbd94ce125cb8cd1a968def95174792e127b0642781e040ed4/orjson-3.11.4-cp313-cp313-win_arm64.whl", 
hash = "sha256:a85f0adf63319d6c1ba06fb0dbf997fced64a01179cf17939a6caca662bf92de", size = 126152, upload-time = "2025-10-24T15:49:52.922Z" }, - { url = "https://files.pythonhosted.org/packages/25/e3/54ff63c093cc1697e758e4fceb53164dd2661a7d1bcd522260ba09f54533/orjson-3.11.4-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:42d43a1f552be1a112af0b21c10a5f553983c2a0938d2bbb8ecd8bc9fb572803", size = 243501, upload-time = "2025-10-24T15:49:54.288Z" }, - { url = "https://files.pythonhosted.org/packages/ac/7d/e2d1076ed2e8e0ae9badca65bf7ef22710f93887b29eaa37f09850604e09/orjson-3.11.4-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:26a20f3fbc6c7ff2cb8e89c4c5897762c9d88cf37330c6a117312365d6781d54", size = 128862, upload-time = "2025-10-24T15:49:55.961Z" }, - { url = "https://files.pythonhosted.org/packages/9f/37/ca2eb40b90621faddfa9517dfe96e25f5ae4d8057a7c0cdd613c17e07b2c/orjson-3.11.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e3f20be9048941c7ffa8fc523ccbd17f82e24df1549d1d1fe9317712d19938e", size = 130047, upload-time = "2025-10-24T15:49:57.406Z" }, - { url = "https://files.pythonhosted.org/packages/c7/62/1021ed35a1f2bad9040f05fa4cc4f9893410df0ba3eaa323ccf899b1c90a/orjson-3.11.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aac364c758dc87a52e68e349924d7e4ded348dedff553889e4d9f22f74785316", size = 129073, upload-time = "2025-10-24T15:49:58.782Z" }, - { url = "https://files.pythonhosted.org/packages/e8/3f/f84d966ec2a6fd5f73b1a707e7cd876813422ae4bf9f0145c55c9c6a0f57/orjson-3.11.4-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5c54a6d76e3d741dcc3f2707f8eeb9ba2a791d3adbf18f900219b62942803b1", size = 136597, upload-time = "2025-10-24T15:50:00.12Z" }, - { url = "https://files.pythonhosted.org/packages/32/78/4fa0aeca65ee82bbabb49e055bd03fa4edea33f7c080c5c7b9601661ef72/orjson-3.11.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:f28485bdca8617b79d44627f5fb04336897041dfd9fa66d383a49d09d86798bc", size = 137515, upload-time = "2025-10-24T15:50:01.57Z" }, - { url = "https://files.pythonhosted.org/packages/c1/9d/0c102e26e7fde40c4c98470796d050a2ec1953897e2c8ab0cb95b0759fa2/orjson-3.11.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2a484cad3585e4ba61985a6062a4c2ed5c7925db6d39f1fa267c9d166487f", size = 136703, upload-time = "2025-10-24T15:50:02.944Z" }, - { url = "https://files.pythonhosted.org/packages/df/ac/2de7188705b4cdfaf0b6c97d2f7849c17d2003232f6e70df98602173f788/orjson-3.11.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e34dbd508cb91c54f9c9788923daca129fe5b55c5b4eebe713bf5ed3791280cf", size = 136311, upload-time = "2025-10-24T15:50:04.441Z" }, - { url = "https://files.pythonhosted.org/packages/e0/52/847fcd1a98407154e944feeb12e3b4d487a0e264c40191fb44d1269cbaa1/orjson-3.11.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b13c478fa413d4b4ee606ec8e11c3b2e52683a640b006bb586b3041c2ca5f606", size = 140127, upload-time = "2025-10-24T15:50:07.398Z" }, - { url = "https://files.pythonhosted.org/packages/c1/ae/21d208f58bdb847dd4d0d9407e2929862561841baa22bdab7aea10ca088e/orjson-3.11.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:724ca721ecc8a831b319dcd72cfa370cc380db0bf94537f08f7edd0a7d4e1780", size = 406201, upload-time = "2025-10-24T15:50:08.796Z" }, - { url = "https://files.pythonhosted.org/packages/8d/55/0789d6de386c8366059db098a628e2ad8798069e94409b0d8935934cbcb9/orjson-3.11.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:977c393f2e44845ce1b540e19a786e9643221b3323dae190668a98672d43fb23", size = 149872, upload-time = "2025-10-24T15:50:10.234Z" }, - { url = "https://files.pythonhosted.org/packages/cc/1d/7ff81ea23310e086c17b41d78a72270d9de04481e6113dbe2ac19118f7fb/orjson-3.11.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1e539e382cf46edec157ad66b0b0872a90d829a6b71f17cb633d6c160a223155", size 
= 139931, upload-time = "2025-10-24T15:50:11.623Z" }, - { url = "https://files.pythonhosted.org/packages/77/92/25b886252c50ed64be68c937b562b2f2333b45afe72d53d719e46a565a50/orjson-3.11.4-cp314-cp314-win32.whl", hash = "sha256:d63076d625babab9db5e7836118bdfa086e60f37d8a174194ae720161eb12394", size = 136065, upload-time = "2025-10-24T15:50:13.025Z" }, - { url = "https://files.pythonhosted.org/packages/63/b8/718eecf0bb7e9d64e4956afaafd23db9f04c776d445f59fe94f54bdae8f0/orjson-3.11.4-cp314-cp314-win_amd64.whl", hash = "sha256:0a54d6635fa3aaa438ae32e8570b9f0de36f3f6562c308d2a2a452e8b0592db1", size = 131310, upload-time = "2025-10-24T15:50:14.46Z" }, - { url = "https://files.pythonhosted.org/packages/1a/bf/def5e25d4d8bfce296a9a7c8248109bf58622c21618b590678f945a2c59c/orjson-3.11.4-cp314-cp314-win_arm64.whl", hash = "sha256:78b999999039db3cf58f6d230f524f04f75f129ba3d1ca2ed121f8657e575d3d", size = 126151, upload-time = "2025-10-24T15:50:15.878Z" }, +version = "3.11.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a4/8052a029029b096a78955eadd68ab594ce2197e24ec50e6b6d2ab3f4e33b/orjson-3.11.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d", size = 245347, upload-time = "2025-12-06T15:54:22.061Z" }, + { url = "https://files.pythonhosted.org/packages/64/67/574a7732bd9d9d79ac620c8790b4cfe0717a3d5a6eb2b539e6e8995e24a0/orjson-3.11.5-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626", size = 129435, upload-time = "2025-12-06T15:54:23.615Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/8d/544e77d7a29d90cf4d9eecd0ae801c688e7f3d1adfa2ebae5e1e94d38ab9/orjson-3.11.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f", size = 132074, upload-time = "2025-12-06T15:54:24.694Z" }, + { url = "https://files.pythonhosted.org/packages/6e/57/b9f5b5b6fbff9c26f77e785baf56ae8460ef74acdb3eae4931c25b8f5ba9/orjson-3.11.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85", size = 130520, upload-time = "2025-12-06T15:54:26.185Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6d/d34970bf9eb33f9ec7c979a262cad86076814859e54eb9a059a52f6dc13d/orjson-3.11.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9", size = 136209, upload-time = "2025-12-06T15:54:27.264Z" }, + { url = "https://files.pythonhosted.org/packages/e7/39/bc373b63cc0e117a105ea12e57280f83ae52fdee426890d57412432d63b3/orjson-3.11.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626", size = 139837, upload-time = "2025-12-06T15:54:28.75Z" }, + { url = "https://files.pythonhosted.org/packages/cb/aa/7c4818c8d7d324da220f4f1af55c343956003aa4d1ce1857bdc1d396ba69/orjson-3.11.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa", size = 137307, upload-time = "2025-12-06T15:54:29.856Z" }, + { url = "https://files.pythonhosted.org/packages/46/bf/0993b5a056759ba65145effe3a79dd5a939d4a070eaa5da2ee3180fbb13f/orjson-3.11.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477", size = 139020, upload-time = "2025-12-06T15:54:31.024Z" }, + { url 
= "https://files.pythonhosted.org/packages/65/e8/83a6c95db3039e504eda60fc388f9faedbb4f6472f5aba7084e06552d9aa/orjson-3.11.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e", size = 141099, upload-time = "2025-12-06T15:54:32.196Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b4/24fdc024abfce31c2f6812973b0a693688037ece5dc64b7a60c1ce69e2f2/orjson-3.11.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69", size = 413540, upload-time = "2025-12-06T15:54:33.361Z" }, + { url = "https://files.pythonhosted.org/packages/d9/37/01c0ec95d55ed0c11e4cae3e10427e479bba40c77312b63e1f9665e0737d/orjson-3.11.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3", size = 151530, upload-time = "2025-12-06T15:54:34.6Z" }, + { url = "https://files.pythonhosted.org/packages/f9/d4/f9ebc57182705bb4bbe63f5bbe14af43722a2533135e1d2fb7affa0c355d/orjson-3.11.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca", size = 141863, upload-time = "2025-12-06T15:54:35.801Z" }, + { url = "https://files.pythonhosted.org/packages/0d/04/02102b8d19fdcb009d72d622bb5781e8f3fae1646bf3e18c53d1bc8115b5/orjson-3.11.5-cp312-cp312-win32.whl", hash = "sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98", size = 135255, upload-time = "2025-12-06T15:54:37.209Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fb/f05646c43d5450492cb387de5549f6de90a71001682c17882d9f66476af5/orjson-3.11.5-cp312-cp312-win_amd64.whl", hash = "sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875", size = 133252, upload-time = "2025-12-06T15:54:38.401Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/7b8c0b26ba18c793533ac1cd145e131e46fcf43952aa94c109b5b913c1f0/orjson-3.11.5-cp312-cp312-win_arm64.whl", hash 
= "sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe", size = 126777, upload-time = "2025-12-06T15:54:39.515Z" }, + { url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" }, + { url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" }, + { url = "https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" }, + { url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = "2025-12-06T15:54:45.059Z" }, + { url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" }, + { url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" }, + { url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" }, + { url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" }, + { url = "https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 
141823, upload-time = "2025-12-06T15:54:56.331Z" }, + { url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" }, + { url = "https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" }, + { url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" }, + { url = "https://files.pythonhosted.org/packages/c2/60/77d7b839e317ead7bb225d55bb50f7ea75f47afc489c81199befc5435b50/orjson-3.11.5-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e446a8ea0a4c366ceafc7d97067bfd55292969143b57e3c846d87fc701e797a0", size = 245252, upload-time = "2025-12-06T15:55:01.127Z" }, + { url = "https://files.pythonhosted.org/packages/f1/aa/d4639163b400f8044cef0fb9aa51b0337be0da3a27187a20d1166e742370/orjson-3.11.5-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:53deb5addae9c22bbe3739298f5f2196afa881ea75944e7720681c7080909a81", size = 129419, upload-time = "2025-12-06T15:55:02.723Z" }, + { url = "https://files.pythonhosted.org/packages/30/94/9eabf94f2e11c671111139edf5ec410d2f21e6feee717804f7e8872d883f/orjson-3.11.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd00d49d6063d2b8791da5d4f9d20539c5951f965e45ccf4e96d33505ce68f", size = 132050, upload-time = "2025-12-06T15:55:03.918Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/c8/ca10f5c5322f341ea9a9f1097e140be17a88f88d1cfdd29df522970d9744/orjson-3.11.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3fd15f9fc8c203aeceff4fda211157fad114dde66e92e24097b3647a08f4ee9e", size = 130370, upload-time = "2025-12-06T15:55:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/25/d4/e96824476d361ee2edd5c6290ceb8d7edf88d81148a6ce172fc00278ca7f/orjson-3.11.5-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df95000fbe6777bf9820ae82ab7578e8662051bb5f83d71a28992f539d2cda7", size = 136012, upload-time = "2025-12-06T15:55:06.402Z" }, + { url = "https://files.pythonhosted.org/packages/85/8e/9bc3423308c425c588903f2d103cfcfe2539e07a25d6522900645a6f257f/orjson-3.11.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a8d676748fca47ade5bc3da7430ed7767afe51b2f8100e3cd65e151c0eaceb", size = 139809, upload-time = "2025-12-06T15:55:07.656Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/b404e94e0b02a232b957c54643ce68d0268dacb67ac33ffdee24008c8b27/orjson-3.11.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa0f513be38b40234c77975e68805506cad5d57b3dfd8fe3baa7f4f4051e15b4", size = 137332, upload-time = "2025-12-06T15:55:08.961Z" }, + { url = "https://files.pythonhosted.org/packages/51/30/cc2d69d5ce0ad9b84811cdf4a0cd5362ac27205a921da524ff42f26d65e0/orjson-3.11.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1863e75b92891f553b7922ce4ee10ed06db061e104f2b7815de80cdcb135ad", size = 138983, upload-time = "2025-12-06T15:55:10.595Z" }, + { url = "https://files.pythonhosted.org/packages/0e/87/de3223944a3e297d4707d2fe3b1ffb71437550e165eaf0ca8bbe43ccbcb1/orjson-3.11.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d4be86b58e9ea262617b8ca6251a2f0d63cc132a6da4b5fcc8e0a4128782c829", size = 141069, upload-time = "2025-12-06T15:55:11.832Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/30/81d5087ae74be33bcae3ff2d80f5ccaa4a8fedc6d39bf65a427a95b8977f/orjson-3.11.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:b923c1c13fa02084eb38c9c065afd860a5cff58026813319a06949c3af5732ac", size = 413491, upload-time = "2025-12-06T15:55:13.314Z" }, + { url = "https://files.pythonhosted.org/packages/d0/6f/f6058c21e2fc1efaf918986dbc2da5cd38044f1a2d4b7b91ad17c4acf786/orjson-3.11.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1b6bd351202b2cd987f35a13b5e16471cf4d952b42a73c391cc537974c43ef6d", size = 151375, upload-time = "2025-12-06T15:55:14.715Z" }, + { url = "https://files.pythonhosted.org/packages/54/92/c6921f17d45e110892899a7a563a925b2273d929959ce2ad89e2525b885b/orjson-3.11.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb150d529637d541e6af06bbe3d02f5498d628b7f98267ff87647584293ab439", size = 141850, upload-time = "2025-12-06T15:55:15.94Z" }, + { url = "https://files.pythonhosted.org/packages/88/86/cdecb0140a05e1a477b81f24739da93b25070ee01ce7f7242f44a6437594/orjson-3.11.5-cp314-cp314-win32.whl", hash = "sha256:9cc1e55c884921434a84a0c3dd2699eb9f92e7b441d7f53f3941079ec6ce7499", size = 135278, upload-time = "2025-12-06T15:55:17.202Z" }, + { url = "https://files.pythonhosted.org/packages/e4/97/b638d69b1e947d24f6109216997e38922d54dcdcdb1b11c18d7efd2d3c59/orjson-3.11.5-cp314-cp314-win_amd64.whl", hash = "sha256:a4f3cb2d874e03bc7767c8f88adaa1a9a05cecea3712649c3b58589ec7317310", size = 133170, upload-time = "2025-12-06T15:55:18.468Z" }, + { url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" }, ] [[package]] @@ -3686,17 +3686,17 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.0" +version = "6.33.4" source = { registry = "https://pypi.org/simple" } -sdist = 
{ url = "https://files.pythonhosted.org/packages/19/ff/64a6c8f420818bb873713988ca5492cba3a7946be57e027ac63495157d97/protobuf-6.33.0.tar.gz", hash = "sha256:140303d5c8d2037730c548f8c7b93b20bb1dc301be280c378b82b8894589c954", size = 443463, upload-time = "2025-10-15T20:39:52.159Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/b8/cda15d9d46d03d4aa3a67cb6bffe05173440ccf86a9541afaf7ac59a1b6b/protobuf-6.33.4.tar.gz", hash = "sha256:dc2e61bca3b10470c1912d166fe0af67bfc20eb55971dcef8dfa48ce14f0ed91", size = 444346, upload-time = "2026-01-12T18:33:40.109Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/ee/52b3fa8feb6db4a833dfea4943e175ce645144532e8a90f72571ad85df4e/protobuf-6.33.0-cp310-abi3-win32.whl", hash = "sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035", size = 425593, upload-time = "2025-10-15T20:39:40.29Z" }, - { url = "https://files.pythonhosted.org/packages/7b/c6/7a465f1825872c55e0341ff4a80198743f73b69ce5d43ab18043699d1d81/protobuf-6.33.0-cp310-abi3-win_amd64.whl", hash = "sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee", size = 436882, upload-time = "2025-10-15T20:39:42.841Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a9/b6eee662a6951b9c3640e8e452ab3e09f117d99fc10baa32d1581a0d4099/protobuf-6.33.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:905b07a65f1a4b72412314082c7dbfae91a9e8b68a0cc1577515f8df58ecf455", size = 427521, upload-time = "2025-10-15T20:39:43.803Z" }, - { url = "https://files.pythonhosted.org/packages/10/35/16d31e0f92c6d2f0e77c2a3ba93185130ea13053dd16200a57434c882f2b/protobuf-6.33.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e0697ece353e6239b90ee43a9231318302ad8353c70e6e45499fa52396debf90", size = 324445, upload-time = "2025-10-15T20:39:44.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/eb/2a981a13e35cda8b75b5585aaffae2eb904f8f351bdd3870769692acbd8a/protobuf-6.33.0-cp39-abi3-manylinux2014_s390x.whl", hash = 
"sha256:e0a1715e4f27355afd9570f3ea369735afc853a6c3951a6afe1f80d8569ad298", size = 339159, upload-time = "2025-10-15T20:39:46.186Z" }, - { url = "https://files.pythonhosted.org/packages/21/51/0b1cbad62074439b867b4e04cc09b93f6699d78fd191bed2bbb44562e077/protobuf-6.33.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:35be49fd3f4fefa4e6e2aacc35e8b837d6703c37a2168a55ac21e9b1bc7559ef", size = 323172, upload-time = "2025-10-15T20:39:47.465Z" }, - { url = "https://files.pythonhosted.org/packages/07/d1/0a28c21707807c6aacd5dc9c3704b2aa1effbf37adebd8caeaf68b17a636/protobuf-6.33.0-py3-none-any.whl", hash = "sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995", size = 170477, upload-time = "2025-10-15T20:39:51.311Z" }, + { url = "https://files.pythonhosted.org/packages/e0/be/24ef9f3095bacdf95b458543334d0c4908ccdaee5130420bf064492c325f/protobuf-6.33.4-cp310-abi3-win32.whl", hash = "sha256:918966612c8232fc6c24c78e1cd89784307f5814ad7506c308ee3cf86662850d", size = 425612, upload-time = "2026-01-12T18:33:29.656Z" }, + { url = "https://files.pythonhosted.org/packages/31/ad/e5693e1974a28869e7cd244302911955c1cebc0161eb32dfa2b25b6e96f0/protobuf-6.33.4-cp310-abi3-win_amd64.whl", hash = "sha256:8f11ffae31ec67fc2554c2ef891dcb561dae9a2a3ed941f9e134c2db06657dbc", size = 436962, upload-time = "2026-01-12T18:33:31.345Z" }, + { url = "https://files.pythonhosted.org/packages/66/15/6ee23553b6bfd82670207ead921f4d8ef14c107e5e11443b04caeb5ab5ec/protobuf-6.33.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2fe67f6c014c84f655ee06f6f66213f9254b3a8b6bda6cda0ccd4232c73c06f0", size = 427612, upload-time = "2026-01-12T18:33:32.646Z" }, + { url = "https://files.pythonhosted.org/packages/2b/48/d301907ce6d0db75f959ca74f44b475a9caa8fcba102d098d3c3dd0f2d3f/protobuf-6.33.4-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:757c978f82e74d75cba88eddec479df9b99a42b31193313b75e492c06a51764e", size = 324484, upload-time = "2026-01-12T18:33:33.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/1c/e53078d3f7fe710572ab2dcffd993e1e3b438ae71cfc031b71bae44fcb2d/protobuf-6.33.4-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c7c64f259c618f0bef7bee042075e390debbf9682334be2b67408ec7c1c09ee6", size = 339256, upload-time = "2026-01-12T18:33:35.231Z" }, + { url = "https://files.pythonhosted.org/packages/e8/8e/971c0edd084914f7ee7c23aa70ba89e8903918adca179319ee94403701d5/protobuf-6.33.4-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:3df850c2f8db9934de4cf8f9152f8dc2558f49f298f37f90c517e8e5c84c30e9", size = 323311, upload-time = "2026-01-12T18:33:36.305Z" }, + { url = "https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532, upload-time = "2026-01-12T18:33:39.199Z" }, ] [[package]]