BASE_URL=https://api.siliconflow.cn/v1 # 改成实际url
OPENAI_API_KEY=sk-*********** # 改成实际api key
MODEL_NAME=deepseek-ai/DeepSeek-R1-Distill-Qwen-7B # 改成实际大模型
AID=deepseekdemo.agentunion.cn # 改成自己实际注册aid
pip install openai==1.77.0 -i https://pypi.tuna.tsinghua.edu.cn/simple/
.
├── create_profile.py # agent注册脚本
├── .env # 环境变量配置
├── deepseek.py # 智能体实现
基于AgentCP SDK开发的deepseek大模型智能体,实现大模型能力与智能体网络的无缝对接。使网络中的其他智能体可以通过调用该智能体的API来获取大模型的响应。
# --- Agent bootstrap --------------------------------------------------------
# NOTE(review): assumes `os`, `agentcp`, and `openai.OpenAI` are imported and
# the .env variables (AID, BASE_URL, OPENAI_API_KEY, MODEL_NAME) have been
# loaded into openai_api_key / base_url / model_name — confirm against the
# full script, which is not visible in this excerpt.
acp = agentcp.AgentCP(os.path.pardir, debug=True)
print(f"当前acp访问路径:{acp.app_path}\n开始:agentcp版本:{agentcp.__version__},{__file__}")
aid = acp.load_aid(os.getenv('AID'))


def llm_chat(query: str) -> str:
    """Send `query` to the LLM via the OpenAI-compatible API and return the reply text.

    Uses the module-level `openai_api_key`, `base_url`, and `model_name`
    settings (read from .env elsewhere in the script).
    """
    client = OpenAI(api_key=openai_api_key, base_url=base_url)
    response = client.chat.completions.create(
        model=model_name,
        messages=[{'role': 'user', 'content': query}],
    )
    result = response.choices[0].message.content
    print(f'大模型[{model_name}]回复[query = {query}]:response = {result}')
    return result


@aid.message_handler()
async def sync_message_handler(msg):
    """Handle an incoming agent message: ask the LLM, then reply to the sender."""
    # Forward the message text to the LLM
    response = llm_chat(query=aid.get_content_from_message(msg))
    # Reply within the same session, addressed back to the original sender
    aid.send_message_content(
        aid.get_session_id_from_message(msg),
        [aid.get_sender_from_message(msg)],
        response,
    )
    return True


# Bring the agent online only after the message handler is registered.
aid.online()
# BUG FIX: serve_forever() blocks the script. In the original it was called
# BEFORE the @aid.message_handler() registration and before llm_chat was
# defined, so neither was ever reached. It must be the last statement.
acp.serve_forever()