diff --git a/.gitignore b/.gitignore index 87f787a38..bb21625c0 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ node_modules .pnp.js # testing +.scratch /coverage # next.js @@ -54,3 +55,4 @@ v2.code-workspace libs/.docker/ui/supabase/supabase-setup google_cloud_service_key.json + diff --git a/fern/mdx/deploy/local.mdx b/fern/mdx/deploy/local.mdx index d99a81add..c4fd0b4ae 100644 --- a/fern/mdx/deploy/local.mdx +++ b/fern/mdx/deploy/local.mdx @@ -1,19 +1,26 @@ -If you don't want to use the Superagent Cloud you can optionally run Superagent localy. In order to make Superagent more modular we have decoupled the Superagent API from the Superagent UI. You can run the API without any by just leveraging the SDKs. Follow the guides below to get started. +If you don't want to use the Superagent Cloud you can optionally run Superagent locally. In order to make Superagent more modular we have decoupled the Superagent API from the Superagent UI. You can run the API directly by just leveraging the SDKs. Follow the guides below to get started. -# Setup Superagent API +## Setup Superagent API ### Getting started -1. Clone the [Superagent repository](https://github.com/homanp/superagent). -```bash -git clone git@github.com:homanp/superagent.git -``` +1. Clone the [Superagent repository](https://github.com/homanp/superagent) and open the superagent folder. + + ```bash + git clone git@github.com:homanp/superagent.git + cd superagent + ``` -2. Navigate to `/libs/superagent` and rename the `.env.example` to `.env`. +2. Navigate to `libs/superagent` and copy the `.env.example` to a new file named `.env`. + + ```bash + cd libs/superagent + cp .env.example .env + ``` ### Set `.env` variables -In order to get Superagent running we need to set some mandatory environment variables. Here is an example: +In order to get Superagent running we need to set some mandatory environment variables. 
Here is an example: ```bash # MANDATORY VARIABLES @@ -63,35 +70,61 @@ E2B_API_KEY= # Run Replicate models as tools REPLICATE_API_TOKEN= ``` + ### Create a virtual env + +In the console, create a virtual environment to install and run the python api. + +```bash +# cd libs/superagent virtualenv venv +``` + +Then activate the new virtual environment to work in it. + +```bash +# Linux/Mac source venv/bin/activate ``` +```bash +# Windows (bash) +source venv/Scripts/activate +``` + ### Install dependencies + +Use poetry to install the project dependencies in the virtual environment + ```bash +# cd libs/superagent poetry install ``` ### Start Supabase + ```bash supabase start ``` ### Get Supabase Info + Get the hostname and port of your Supabase instance and replace them in the `DATABASE_URL` and `DATABASE_MIGRATION_URL` variable in your `.env` file. + ```bash supabase status ``` ### Run database migrations -```bash -prisma migrate dev + +```bash prisma generate +prisma migrate dev ``` ### Start the server -```bash + +```bash uvicorn app.main:app --reload # Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) @@ -103,7 +136,7 @@ uvicorn app.main:app --reload ```python - import os + import os from superagent.client import Superagent client = Superagent( @@ -124,7 +157,7 @@ uvicorn app.main:app --reload -And that's it! You now have an instance of the Superagent API running on your local machine. Make sure to checkout the REST API reference for more details on each API endpoint or visit http://127.0.0.1:8000 to view the Swagger docs. +And that's it! You now have an instance of the Superagent API running on your local machine. Make sure to checkout the REST API reference for more details on each API endpoint or visit [http://127.0.0.1:8000](http://127.0.0.1:8000) to view the Swagger docs. # Setup Superagent UI @@ -133,20 +166,23 @@ And that's it! You now have an instance of the Superagent API running on your lo The Superagent UI is setup using Supabase. 
It leverages the Supabase's `Auth` and `Storage` modules. Configure your Supabase setup as per the instructions below: 1. Clone the Superagent repository -```bash + +```bash git clone git@github.com:homanp/superagent.git ``` 2. Navigate to `/libs/ui` -3. Install Supabase CLI and create project: https://supabase.com/docs/guides/cli/getting-started +3. Install Supabase CLI and create project: [https://supabase.com/docs/guides/cli/getting-started](https://supabase.com/docs/guides/cli/getting-started) 4. Start the local Supabase project + ```bash supabase start ``` 5. Run the database migrations + ```bash # Local supabase project supabase migration up @@ -159,13 +195,15 @@ supabase db push Screenshot 2023-09-14 at 23 27 35 ### Setup authentication -Superagent supports password-less authentication and Github OAuth. You may add which ever Authentication provider supported by Supabase. + +Superagent supports password-less authentication and Github OAuth. You may add any authentication provider supported by Supabase. 1. Create a new Github OAuth app in your [Github account](https://github.com/settings/developers) 2. Copy the `CLIENT_ID` and `CLIENT_SECRET` and paste them into the `/libs/ui/.env` variables in your local Superagent project. 3. Set the following callback URL + ```sh /auth/v1/callback ``` @@ -181,7 +219,8 @@ Superagent supports password-less authentication and Github OAuth. You may add w 1. Change the name of `.env.example` to `.env` in `/libs/ui`. 2. Set the required environment variables. -```bash + +```bash # MANDATORY # Supabase variables found inside of the Supabase dashboard (local or remote) @@ -217,13 +256,15 @@ NEXT_PUBLIC_LANGFUSE_BASE_URL= ### Run app 1. From inside `/libs/ui` run: -```bash + +```bash npm i ``` 2. 
Start the app + ``` npm run dev ``` -You should now be able to visit http://localhost:3000 and see the Superagent login page \ No newline at end of file +You should now be able to visit [http://localhost:3000](http://localhost:3000) and see the Superagent login page diff --git a/fern/mdx/sdk/saml.mdx b/fern/mdx/sdk/saml.mdx index 6d2e458ac..6993cf1e0 100644 --- a/fern/mdx/sdk/saml.mdx +++ b/fern/mdx/sdk/saml.mdx @@ -56,7 +56,7 @@ workflows: name: Browser use_for: Scraping web pages - - openai_assistants: + - openai_assistant: name: Summarizing Assistant prompt: You are an AI assistant that excels in summarizing content. llm: gpt-4-1106-preview diff --git a/libs/.docker/.env.example b/libs/.docker/.env.example index 8451d380b..84dd138b3 100644 --- a/libs/.docker/.env.example +++ b/libs/.docker/.env.example @@ -16,6 +16,7 @@ PGADMIN_DEFAULT_PASSWORD=local123 # Base (mandatory) SUPERAGENT_API_URL=https://api.beta.superagent.sh +SUPERRAG_API_URL="https://rag.beta.superagent.sh/api/v1" OPENAI_API_KEY= DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_DB_HOST}:${POSTGRES_DB_PORT}/superagent DATABASE_MIGRATION_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_DB_HOST}:${POSTGRES_DB_PORT}/superagent @@ -25,7 +26,11 @@ OPENROUTER_API_KEY= # Mandatory for Neon DB DATABASE_SHADOW_URL= # Memory (mandatory) +MEMORY=motorhead MEMORY_API_URL=http://motorhead:8081 +# Redis memory +REDIS_MEMORY_URL=redis://localhost:6379/0 +REDIS_MEMORY_WINDOW=10 # NOTE: Vectorstores (one is mandatory if you plan on loading datasources) VECTORSTORE=pinecone # `qdrant`, `weaviate` etc. 
# Qdrant vars diff --git a/libs/.docker/docker-compose.yml index 4a069194f..2da2187d6 100644 --- a/libs/.docker/docker-compose.yml +++ b/libs/.docker/docker-compose.yml @@ -11,6 +11,7 @@ services: environment: - PORT=8080 - SUPERAGENT_API_URL=${SUPERAGENT_API_URL} + - SUPERRAG_API_URL=${SUPERRAG_API_URL} - STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY} - POSTGRES_DB_HOST=${POSTGRES_DB_HOST} - POSTGRES_DB_PORT=${POSTGRES_DB_PORT} @@ -20,7 +21,10 @@ - JWT_SECRET=${JWT_SECRET} - OPENROUTER_API_KEY=${OPENROUTER_API_KEY} - DATABASE_SHADOW_URL=${DATABASE_SHADOW_URL} + - MEMORY=${MEMORY} - MEMORY_API_URL=${MEMORY_API_URL} + - REDIS_MEMORY_URL=${REDIS_MEMORY_URL} + - REDIS_MEMORY_WINDOW=${REDIS_MEMORY_WINDOW} - VECTORSTORE=${VECTORSTORE} - QDRANT_API_KEY=${QDRANT_API_KEY} - QDRANT_HOST=${QDRANT_HOST} diff --git a/libs/.docker/superagent/db/.env.example b/libs/.docker/superagent/db/.env.example new file mode 100644 index 000000000..62fa66882 --- /dev/null +++ b/libs/.docker/superagent/db/.env.example @@ -0,0 +1,8 @@ +# Mandatory +POSTGRES_USER=postgres +POSTGRES_PASSWORD=password +POSTGRES_DB_PORT=5432 +POSTGRES_DB_HOST=pgdb +# Recommended but optional - needed if using docker-compose.pgadmin.yml +PGADMIN_DEFAULT_EMAIL=admin@admin.com +PGADMIN_DEFAULT_PASSWORD=local123 \ No newline at end of file diff --git a/libs/.docker/superagent/db/run.sh b/libs/.docker/superagent/db/run.sh new file mode 100644 index 000000000..dca3bd4cb --- /dev/null +++ b/libs/.docker/superagent/db/run.sh @@ -0,0 +1,17 @@ +# Remove any running services +./stop.sh + +# Check if the network exists +if ! 
docker network ls | grep -q superagent_network; then + # Create the network if it does not exist + docker network create superagent_network +fi + +# Run the db services +docker compose -f docker-compose.pgdb.yml \ + -f docker-compose.pgadmin.yml \ + up \ + --build \ + -d + +docker logs pgdb \ No newline at end of file diff --git a/libs/.docker/superagent/db/stop.sh b/libs/.docker/superagent/db/stop.sh new file mode 100644 index 000000000..0e09b3ab9 --- /dev/null +++ b/libs/.docker/superagent/db/stop.sh @@ -0,0 +1,5 @@ +# Remove any running services +docker compose -f docker-compose.pgdb.yml \ + -f docker-compose.pgadmin.yml \ + down + # -v # TODO: remove the -v flag when we have a persistent database diff --git a/libs/.docker/superagent/db/uninstall.sh b/libs/.docker/superagent/db/uninstall.sh new file mode 100644 index 000000000..6925b153f --- /dev/null +++ b/libs/.docker/superagent/db/uninstall.sh @@ -0,0 +1,6 @@ +# Remove any running services +docker compose -f docker-compose.pgdb.yml \ + -f docker-compose.pgadmin.yml \ + down \ + -v \ + --remove-orphans \ No newline at end of file diff --git a/libs/superagent/.env.example b/libs/superagent/.env.example index ef376275b..148f3b4b8 100644 --- a/libs/superagent/.env.example +++ b/libs/superagent/.env.example @@ -8,6 +8,11 @@ OPENROUTER_API_KEY= # Mandatory for Neon DB DATABASE_SHADOW_URL= # Memory (mandatory) +MEMORY=motorhead +# Redis Memory +REDIS_MEMORY_URL=redis://localhost:6379/0 +REDIS_MEMORY_WINDOW=10 +# Motorhead Memory MEMORY_API_URL=https://memory.superagent.sh # NOTE: Vectorstores (one is mandatory if you plan on loading datasources) VECTORSTORE=pinecone # `qdrant`, `weaviate` etc. diff --git a/libs/superagent/Dockerfile b/libs/superagent/Dockerfile index e4ef343e7..6aaf1e365 100644 --- a/libs/superagent/Dockerfile +++ b/libs/superagent/Dockerfile @@ -33,6 +33,10 @@ COPY --from=builder /app/.venv /app/.venv COPY . 
./ +# Improve grpc error messages +RUN pip install grpcio-status + +# Enable prisma migrations RUN prisma generate CMD exec gunicorn --bind :$PORT --workers 2 --timeout 0 --worker-class uvicorn.workers.UvicornWorker --threads 8 app.main:app diff --git a/libs/superagent/app/agents/langchain.py b/libs/superagent/app/agents/langchain.py index 144e8f674..cc962782c 100644 --- a/libs/superagent/app/agents/langchain.py +++ b/libs/superagent/app/agents/langchain.py @@ -6,7 +6,11 @@ from decouple import config from langchain.agents import AgentType, initialize_agent from langchain.chains import LLMChain -from langchain.memory.motorhead_memory import MotorheadMemory +from langchain.memory import ( + ConversationBufferWindowMemory, + MotorheadMemory, + RedisChatMessageHistory, +) from langchain.prompts import MessagesPlaceholder, PromptTemplate from langchain.schema import SystemMessage from langchain_openai import AzureChatOpenAI, ChatOpenAI @@ -190,18 +194,36 @@ async def _get_prompt(self, agent: Agent) -> str: return SystemMessage(content=content) async def _get_memory(self) -> List: - memory = MotorheadMemory( - session_id=( - f"{self.agent_id}-{self.session_id}" - if self.session_id - else f"{self.agent_id}" - ), - memory_key="chat_history", - url=config("MEMORY_API_URL"), - return_messages=True, - output_key="output", - ) - await memory.init() + memory_type = config("MEMORY", "motorhead") + if memory_type == "redis": + memory = ConversationBufferWindowMemory( + chat_memory=RedisChatMessageHistory( + session_id=( + f"{self.agent_id}-{self.session_id}" + if self.session_id + else f"{self.agent_id}" + ), + url=config("REDIS_MEMORY_URL", "redis://localhost:6379/0"), + key_prefix="superagent:", + ), + memory_key="chat_history", + return_messages=True, + output_key="output", + k=config("REDIS_MEMORY_WINDOW", 10), + ) + else: + memory = MotorheadMemory( + session_id=( + f"{self.agent_id}-{self.session_id}" + if self.session_id + else f"{self.agent_id}" + ), + 
memory_key="chat_history", + url=config("MEMORY_API_URL"), + return_messages=True, + output_key="output", + ) + await memory.init() return memory async def get_agent(self): diff --git a/libs/superagent/app/api/agents.py b/libs/superagent/app/api/agents.py index 4fa6e5e83..ed8672134 100644 --- a/libs/superagent/app/api/agents.py +++ b/libs/superagent/app/api/agents.py @@ -62,6 +62,7 @@ router = APIRouter() logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) class LLMPayload: @@ -441,7 +442,8 @@ async def invoke( "tools": {"include": {"tool": True}}, }, ) - model = LLM_MAPPING.get(agent_config.llmModel) or agent_config.metadata["model"] + model = LLM_MAPPING.get( + agent_config.llmModel) or agent_config.metadata["model"] def track_agent_invocation(result): intermediate_steps_to_obj = [ @@ -522,7 +524,7 @@ async def send_message( } ) except Exception as e: - logging.error(f"Error tracking agent invocation: {e}") + logger.error(f"Error tracking agent invocation: {e}") if "intermediate_steps" in result: for step in result["intermediate_steps"]: @@ -536,17 +538,18 @@ async def send_message( f'"args": {json.dumps(args)}}}\n\n' ) except Exception as error: - logging.error(f"Error in send_message: {error}") + logger.error(f"Error in send_message: {error}") if SEGMENT_WRITE_KEY: try: - track_agent_invocation({"error": str(error), "status_code": 500}) + track_agent_invocation( + {"error": str(error), "status_code": 500}) except Exception as e: - logging.error(f"Error tracking agent invocation: {e}") + logger.error(f"Error tracking agent invocation: {e}") yield ("event: error\n" f"data: {error}\n\n") finally: streaming_callback.done.set() - logging.info("Invoking agent...") + logger.info("Invoking agent...") session_id = body.sessionId input = body.input enable_streaming = body.enableStreaming @@ -570,7 +573,7 @@ async def send_message( ) if enable_streaming: - logging.info("Streaming enabled. 
Preparing streaming response...") + logger.info("Streaming enabled. Preparing streaming response...") generator = send_message( agent, @@ -580,7 +583,7 @@ async def send_message( ) return StreamingResponse(generator, media_type="text/event-stream") - logging.info("Streaming not enabled. Invoking agent synchronously...") + logger.info("Streaming not enabled. Invoking agent synchronously...") output = await agent.ainvoke( input=agent_input, @@ -594,7 +597,7 @@ async def send_message( try: output = json.loads(output.get("output")) except Exception as e: - logging.error(f"Error parsing output: {e}") + logger.error(f"Error parsing output: {e}") if not enable_streaming and SEGMENT_WRITE_KEY: try: @@ -608,7 +611,7 @@ async def send_message( } ) except Exception as e: - logging.error(f"Error tracking agent invocation: {e}") + logger.error(f"Error tracking agent invocation: {e}") return {"success": True, "data": output} diff --git a/libs/superagent/app/api/workflows.py b/libs/superagent/app/api/workflows.py index 850cd9a88..f5c2d53bd 100644 --- a/libs/superagent/app/api/workflows.py +++ b/libs/superagent/app/api/workflows.py @@ -182,7 +182,8 @@ async def invoke( workflow_data = await prisma.workflow.find_unique( where={"id": workflow_id}, - include={"steps": {"include": {"agent": True}, "order_by": {"order": "asc"}}}, + include={"steps": {"include": {"agent": True}, + "order_by": {"order": "asc"}}}, ) workflow_steps = [] @@ -250,7 +251,7 @@ def track_invocation(output): ) if enable_streaming: - logging.info("Streaming enabled. Preparing streaming response...") + logger.info("Streaming enabled. 
Preparing streaming response...") async def send_message() -> AsyncIterable[str]: try: @@ -300,7 +301,7 @@ async def send_message() -> AsyncIterable[str]: } ) - logging.error(f"Error in send_message: {error}") + logger.error(f"Error in send_message: {error}") finally: for workflow_step in workflow_steps: workflow_step["callbacks"]["streaming"].done.set() @@ -308,7 +309,7 @@ async def send_message() -> AsyncIterable[str]: generator = send_message() return StreamingResponse(generator, media_type="text/event-stream") - logging.info("Streaming not enabled. Invoking workflow synchronously...") + logger.info("Streaming not enabled. Invoking workflow synchronously...") output = await workflow.arun( input, ) diff --git a/libs/superagent/app/main.py b/libs/superagent/app/main.py index 2361d107e..9bcb66353 100644 --- a/libs/superagent/app/main.py +++ b/libs/superagent/app/main.py @@ -36,7 +36,7 @@ title="Superagent", docs_url="/", description="The Open Source AI Assistant Framework & API", - version="0.2.8", + version="0.2.9", servers=[{"url": config("SUPERAGENT_API_URL")}], ) diff --git a/libs/superagent/app/tools/__init__.py b/libs/superagent/app/tools/__init__.py index 8c9b0acfb..13d8470b0 100644 --- a/libs/superagent/app/tools/__init__.py +++ b/libs/superagent/app/tools/__init__.py @@ -46,6 +46,8 @@ from app.tools.wolfram_alpha import WolframAlpha from app.tools.zapier import ZapierNLA +logger = logging.getLogger(__name__) + TOOL_TYPE_MAPPING = { "AGENT": {"class": Agent, "schema": AgentInput}, "ALGOLIA": {"class": Algolia, "schema": AlgoliaInput}, @@ -91,7 +93,7 @@ def create_pydantic_model_from_object(obj: Dict[str, Any]) -> Any: if isinstance(value, dict): type = value.get("type") if not type: - logging.warning(f"Type not found for {key}, defaulting to string") + logger.warning(f"Type not found for {key}, defaulting to string") if "enum" in value: enum_values = value["enum"] enum_name = f"{key.capitalize()}Enum" diff --git a/libs/superagent/app/tools/browser.py 
b/libs/superagent/app/tools/browser.py index 583df28e9..c57d2a5fd 100644 --- a/libs/superagent/app/tools/browser.py +++ b/libs/superagent/app/tools/browser.py @@ -1,11 +1,14 @@ import aiohttp import requests -from bs4 import BeautifulSoup +import logging +from bs4 import BeautifulSoup, NavigableString, Tag, Comment from langchain_community.tools import BaseTool as LCBaseTool from pydantic import BaseModel, Field from app.tools.base import BaseTool +logger = logging.getLogger(__name__) + class LCBrowser(LCBaseTool): name = "Browser" @@ -24,13 +27,44 @@ async def _arun(self, url: str) -> str: async with aiohttp.ClientSession() as session: async with session.get(url) as response: html_content = await response.text() - soup = BeautifulSoup(html_content, "html.parser") - text = soup.get_text() - return text + + soup = BeautifulSoup(html_content, 'html.parser') + + def extract_text_with_links(element): + texts = [] + for child in element.children: + if isinstance(child, Comment): + continue + elif isinstance(child, NavigableString): + stripped_text = str(child).strip() + if stripped_text.startswith('xml') and stripped_text.endswith('"?'): + continue + if stripped_text: + texts.append(stripped_text) + elif isinstance(child, Tag): + if child.name == 'a': + link_text = child.get_text(strip=True) + href = child.get('href', '').strip() + if href and href != '/': + texts.append( + f"{link_text} {href}" if link_text else f"{href}") + elif child.name == 'iframe': + src = child.get('src', '').strip() + if src: + texts.append(src) + elif child.name not in ['script', 'style', 'noscript', 'xml']: + texts.append(extract_text_with_links(child)) + + return '\n'.join(filter(None, texts)) + + cleaned_text = extract_text_with_links(soup) + logger.debug(f"BROWSER TOOL result: {cleaned_text}") + return cleaned_text class BrowserArgs(BaseModel): - url: str = Field(..., description="A valid url including protocol to analyze") + url: str = Field(..., + description="A valid url including 
protocol to analyze") class Browser(BaseTool): diff --git a/libs/superagent/poetry.lock b/libs/superagent/poetry.lock index 407d9d38b..19b5ca051 100644 --- a/libs/superagent/poetry.lock +++ b/libs/superagent/poetry.lock @@ -4005,6 +4005,24 @@ files = [ [package.dependencies] setuptools = ">=41.0" +[[package]] +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "referencing" version = "0.32.0" @@ -4348,24 +4366,24 @@ python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, @@ -4373,7 +4391,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = 
"sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, @@ -4381,7 +4399,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, @@ -4389,7 +4407,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = 
"ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, @@ -5858,4 +5876,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.1, <3.12" -content-hash = "8ea9134de128d2c477bad4d6819cdd961cf1c4f481bc08058c5bbadf1ea86b61" +content-hash = "26959319946dde308b24427ab56c5beb842a1d3170189ac4e57b2dcffca48103" diff --git a/libs/superagent/pyproject.toml b/libs/superagent/pyproject.toml index eaa309240..2ae3a4bdf 100644 --- a/libs/superagent/pyproject.toml +++ b/libs/superagent/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "superagent" -version = "0.2.6" +version = "0.2.9" description = "" authors = ["Ismail Pelaseyed"] readme = "../../README.md" @@ -63,6 +63,7 @@ langchain-openai = "^0.0.5" python-docx = "^1.1.0" prisma = "^0.12.0" stripe = "^8.2.0" +redis = "^5.0.1"