5 changes: 5 additions & 0 deletions fern/mdx/deploy/local.mdx
@@ -41,6 +41,11 @@ MEMORY_API_URL=https://memory.superagent.sh

# OPTIONAL VARIABLES

# AgentOps
# Automatically track costs, session replays, and agent benchmarks. Sign up for a key at [agentops.ai](https://agentops.ai).
# Using the default org key in .env.example enables additional premium features.
AGENTOPS_API_KEY=<YOUR_AGENTOPS_API_KEY>
AGENTOPS_ORG_KEY=<YOUR_ORG_KEY>

# Langfuse observability
LANGFUSE_PUBLIC_KEY=<LANGFUSE_PUBLIC_KEY>
LANGFUSE_SECRET_KEY=<LANGFUSE_SECRET_KEY>
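For reference, a minimal sketch of how these optional variables are picked up at runtime, mirroring the guarded setup in `libs/superagent/app/api/agents.py` further down in this PR (key values and tags are placeholders):

```python
# Sketch only: the handler is created when both keys are present, otherwise
# tracking stays disabled and the routes behave as before.
from agentops.langchain_callback_handler import AsyncLangchainCallbackHandler
from decouple import config

agentops_api_key = config("AGENTOPS_API_KEY", default=None)
agentops_org_key = config("AGENTOPS_ORG_KEY", default=None)

agentops_handler = None
if agentops_api_key and agentops_org_key:
    agentops_handler = AsyncLangchainCallbackHandler(
        api_key=agentops_api_key,
        org_key=agentops_org_key,
        tags=["my-agent-id", "my-session-id"],  # placeholder tags
    )
```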
7 changes: 7 additions & 0 deletions libs/superagent/.env.example
@@ -33,6 +33,9 @@ ASTRA_DB_KEYSPACE_NAME=
E2B_API_KEY=e2b_21b611cdf96fad06a6a819708734be20cfe8b777
# Replicate LLM/tool
REPLICATE_API_TOKEN=
# AgentOps session tracking
AGENTOPS_API_KEY=
AGENTOPS_ORG_KEY=843bf677-e691-45ad-97cf-909e99f9ad83
# Langfuse tracing
LANGFUSE_PUBLIC_KEY=
LANGFUSE_SECRET_KEY=
@@ -42,7 +45,11 @@ LANGCHAIN_TRACING_V2=False
LANGCHAIN_ENDPOINT="https://api.smith.langchain.com"
LANGCHAIN_API_KEY=
LANGSMITH_PROJECT_ID=
# Agentops tracking
AGENTOPS_API_KEY=
AGENTOPS_ORG_KEY=
# Finetunes
LAMINI_API_KEY=
# Tracking
SEGMENT_WRITE_KEY=

9 changes: 9 additions & 0 deletions libs/superagent/app/agents/base.py
@@ -1,5 +1,10 @@
from typing import Any, List, Optional

from agentops.langchain_callback_handler import (
AsyncCallbackHandler,
LangchainCallbackHandler,
)

from app.models.request import LLMParams
from app.utils.streaming import CustomAsyncIteratorCallbackHandler
from prisma.models import Agent, AgentDatasource, AgentLLM, AgentTool
@@ -18,9 +23,11 @@ def __init__(
enable_streaming: bool = False,
output_schema: str = None,
callback: CustomAsyncIteratorCallbackHandler = None,
session_tracker: LangchainCallbackHandler | AsyncCallbackHandler = None,
llm_params: Optional[LLMParams] = {},
agent_config: Agent = None,
):
self.session_tracker = session_tracker
self.agent_id = agent_id
self.session_id = session_id
self.enable_streaming = enable_streaming
@@ -53,6 +60,7 @@ async def get_agent(self):
enable_streaming=self.enable_streaming,
output_schema=self.output_schema,
callback=self.callback,
session_tracker=self.session_tracker,
llm_params=self.llm_params,
)
else:
@@ -64,6 +72,7 @@ async def get_agent(self):
enable_streaming=self.enable_streaming,
output_schema=self.output_schema,
callback=self.callback,
session_tracker=self.session_tracker,
)

return await agent.get_agent(config=self.agent_config)
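Taken together, `session_tracker` threads an AgentOps callback handler from the API layer down to the concrete agent implementation. A minimal usage sketch, assuming the constructor shown above and placeholder ids/keys (run inside an async context; `agent_config` is fetched from Prisma in the API routes and omitted here):

```python
# Sketch: wiring an AgentOps handler into AgentBase. Ids and keys are placeholders.
from agentops.langchain_callback_handler import AsyncLangchainCallbackHandler
from app.agents.base import AgentBase


async def build_tracked_agent():
    tracker = AsyncLangchainCallbackHandler(
        api_key="<YOUR_AGENTOPS_API_KEY>",
        org_key="<YOUR_ORG_KEY>",
        tags=["agent-123", "session-456"],
    )
    # session_tracker is forwarded to the concrete agent via get_agent().
    return await AgentBase(
        agent_id="agent-123",
        session_id="session-456",
        enable_streaming=False,
        session_tracker=tracker,
    ).get_agent()
```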
10 changes: 8 additions & 2 deletions libs/superagent/app/agents/langchain.py
@@ -111,20 +111,26 @@ async def _get_llm(self, agent_llm: AgentLLM, model: str) -> Any:
**(self.llm_params.dict() if self.llm_params else {}),
}

callbacks = []
if self.enable_streaming:
callbacks.append(self.callback)
if self.session_tracker:
callbacks.append(self.session_tracker)

if agent_llm.llm.provider == "OPENAI":
return ChatOpenAI(
model=LLM_MAPPING[model],
openai_api_key=agent_llm.llm.apiKey,
streaming=self.enable_streaming,
callbacks=[self.callback] if self.enable_streaming else [],
callbacks=callbacks,
**(agent_llm.llm.options if agent_llm.llm.options else {}),
**(llm_params),
)
elif agent_llm.llm.provider == "AZURE_OPENAI":
return AzureChatOpenAI(
api_key=agent_llm.llm.apiKey,
streaming=self.enable_streaming,
callbacks=[self.callback] if self.enable_streaming else [],
callbacks=callbacks,
**(agent_llm.llm.options if agent_llm.llm.options else {}),
**(llm_params),
)
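The change above replaces the per-provider `callbacks=[self.callback] if self.enable_streaming else []` expression with a single list built once, so the streaming callback and the AgentOps tracker both receive the same LLM events. The pattern in isolation, as a sketch with placeholder values (the `ChatOpenAI` import path depends on the installed LangChain version):

```python
from langchain.chat_models import ChatOpenAI  # import path varies across LangChain versions

enable_streaming = False   # placeholders for illustration
streaming_callback = None  # e.g. CustomAsyncIteratorCallbackHandler
session_tracker = None     # e.g. an AgentOps Langchain callback handler

callbacks = []
if enable_streaming and streaming_callback:
    callbacks.append(streaming_callback)
if session_tracker:
    callbacks.append(session_tracker)

llm = ChatOpenAI(
    openai_api_key="sk-...",   # placeholder key
    streaming=enable_streaming,
    callbacks=callbacks,       # every attached handler sees the same events
)
```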
18 changes: 17 additions & 1 deletion libs/superagent/app/api/agents.py
@@ -4,6 +4,7 @@
from typing import AsyncIterable

import segment.analytics as analytics
from agentops.langchain_callback_handler import AsyncLangchainCallbackHandler
from decouple import config
from fastapi import APIRouter, Depends
from fastapi.responses import JSONResponse, StreamingResponse
@@ -146,7 +147,6 @@ async def get(agent_id: str, api_user=Depends(get_current_api_user)):
"llms": {"include": {"llm": True}},
},
)
print(data)
for llm in data.llms:
llm.llm.options = json.dumps(llm.llm.options)
for tool in data.tools:
@@ -233,6 +233,17 @@ async def invoke(
)
langfuse_handler = trace.get_langchain_handler()

agentops_api_key = config("AGENTOPS_API_KEY", default=None)
agentops_org_key = config("AGENTOPS_ORG_KEY", default=None)

agentops_handler = None
if agentops_api_key and agentops_org_key:
agentops_handler = AsyncLangchainCallbackHandler(
api_key=agentops_api_key,
org_key=agentops_org_key,
tags=[agent_id, str(body.sessionId)],
)

agent_config = await prisma.agent.find_unique_or_raise(
where={"id": agent_id},
include={
@@ -281,6 +292,9 @@ def track_agent_invocation(result):
if langfuse_handler:
agentCallbacks.append(langfuse_handler)

if agentops_handler:
agentCallbacks.append(agentops_handler)

async def send_message(
agent: LLMChain | AgentExecutor,
content: str,
@@ -337,6 +351,7 @@ async def send_message(
enable_streaming=enable_streaming,
output_schema=output_schema,
callback=callback,
session_tracker=agentops_handler,
llm_params=body.llm_params,
agent_config=agent_config,
).get_agent()
@@ -348,6 +363,7 @@
return StreamingResponse(generator, media_type="text/event-stream")

logging.info("Streaming not enabled. Invoking agent synchronously...")

output = await agent.acall(
inputs={"input": input}, tags=[agent_id], callbacks=agentCallbacks
)
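Because the handler is only built when both keys resolve, AgentOps stays strictly opt-in for agent invocations. Note that, unlike the workflow route below, this route never closes the session explicitly; a hypothetical way to do so after the synchronous path, assuming the same `ao_client.end_session` API used in `workflows.py` (not part of this diff):

```python
# Hypothetical follow-up, not in this PR: close the AgentOps session once the
# synchronous invocation returns, mirroring app/api/workflows.py below.
output = await agent.acall(
    inputs={"input": input}, tags=[agent_id], callbacks=agentCallbacks
)
if agentops_handler:
    agentops_handler.ao_client.end_session(
        "Success", end_state_reason="Agent invocation completed"
    )
```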
13 changes: 13 additions & 0 deletions libs/superagent/app/api/workflows.py
@@ -4,6 +4,7 @@
from typing import AsyncIterable

import segment.analytics as analytics
from agentops.langchain_callback_handler import AsyncLangchainCallbackHandler
from decouple import config
from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse
@@ -174,10 +175,18 @@ async def invoke(
input = body.input
enable_streaming = body.enableStreaming

agentops_api_key = config("AGENTOPS_API_KEY")
agentops_org_key = config("AGENTOPS_ORG_KEY")

agentops_handler = AsyncLangchainCallbackHandler(
api_key=agentops_api_key, org_key=agentops_org_key, tags=[session_id]
)

workflow = WorkflowBase(
workflow=workflowData,
enable_streaming=enable_streaming,
callbacks=[workflowStep["callback"] for workflowStep in workflowSteps],
session_tracker=agentops_handler,
session_id=session_id,
)

@@ -221,6 +230,10 @@ async def send_message() -> AsyncIterable[str]:
input,
)

# End session
agentops_handler.ao_client.end_session(
"Success", end_state_reason="Workflow completed"
)
return {"success": True, "data": output}


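Two details stand out here: the keys are read with `config(...)` and no default, so the route fails at request time if they are unset, and the session is ended only on the success path. A more defensive variant might look like this sketch (the `"Fail"` end state is an assumption about the AgentOps client, not something exercised in this diff):

```python
# Sketch: guard the handler the way the agent route does, and close the
# session on failure as well as success. "Fail" is an assumed end state.
agentops_handler = None
if agentops_api_key and agentops_org_key:
    agentops_handler = AsyncLangchainCallbackHandler(
        api_key=agentops_api_key, org_key=agentops_org_key, tags=[session_id]
    )

try:
    output = await workflow.arun(input)
except Exception:
    if agentops_handler:
        agentops_handler.ao_client.end_session(
            "Fail", end_state_reason="Workflow raised an exception"
        )
    raise
else:
    if agentops_handler:
        agentops_handler.ao_client.end_session(
            "Success", end_state_reason="Workflow completed"
        )
```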
9 changes: 9 additions & 0 deletions libs/superagent/app/workflows/base.py
@@ -1,6 +1,12 @@
import asyncio
from typing import Any, List

from agentops.langchain_callback_handler import (
AsyncCallbackHandler,
LangchainCallbackHandler,
)
from decouple import config

from app.agents.base import AgentBase
from app.utils.prisma import prisma
from app.utils.streaming import CustomAsyncIteratorCallbackHandler
@@ -13,11 +19,13 @@ def __init__(
workflow: Workflow,
callbacks: List[CustomAsyncIteratorCallbackHandler],
session_id: str,
session_tracker: LangchainCallbackHandler | AsyncCallbackHandler = None,
enable_streaming: bool = False,
):
self.workflow = workflow
self.enable_streaming = enable_streaming
self.session_id = session_id
self.session_tracker = session_tracker
self.callbacks = callbacks

async def arun(self, input: Any):
@@ -41,6 +49,7 @@ async def arun(self, input: Any):
agent_id=step.agentId,
enable_streaming=True,
callback=self.callbacks[stepIndex],
session_tracker=self.session_tracker,
session_id=self.session_id,
agent_config=agent_config,
).get_agent()
35 changes: 28 additions & 7 deletions libs/superagent/poetry.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions libs/superagent/pyproject.toml
@@ -59,6 +59,7 @@ weaviate-client = "^3.25.3"
qdrant-client = "^1.6.9"
langfuse = "^2.6.3"
vecs = "^0.4.2"
agentops = {extras = ["langchain"], version = "^0.0.20"}


[build-system]
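The `langchain` extra is what provides the `agentops.langchain_callback_handler` module imported throughout this PR. A quick sanity check after `poetry install` might look like:

```python
# Sketch: verify the agentops[langchain] extra installed correctly by importing
# the handler classes this PR relies on.
from agentops.langchain_callback_handler import (
    AsyncCallbackHandler,
    AsyncLangchainCallbackHandler,
    LangchainCallbackHandler,
)

print(LangchainCallbackHandler, AsyncCallbackHandler, AsyncLangchainCallbackHandler)
```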