feat: add agents/utils/llm_api
parent ad7c611476
commit f54e567d7d

24 agents/README.md Normal file
@@ -0,0 +1,24 @@

# EmoLLM——MetaGPT

## Usage

```bash
pip install -r requirements.txt
pip install .
```

## API key configuration

To make sure the project runs correctly, create a `.env` file in the project root and set your API keys in it. You can fill in the keys following the example below. zhipuai is used by default, so writing only `ZHIPUAI_API_KEY` is enough to get started.

```bash
OPENAI_API_KEY=
OPENAI_API_BASE=
ZHIPUAI_API_KEY=
BAIDU_API_KEY=
OPENAI_API_MODEL=
```
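
For reference, a minimal sketch of how these variables are picked up at runtime, mirroring the `load_dotenv()` / `os.environ` pattern used by `agents/utils/common_llm_api.py` below (the explicit check is only illustrative):

```python
import os

from dotenv import load_dotenv

# Read key=value pairs from the project's .env file into os.environ.
load_dotenv()

# Only ZHIPUAI_API_KEY is required for the default zhipuai backend.
if not os.environ.get("ZHIPUAI_API_KEY"):
    raise RuntimeError("ZHIPUAI_API_KEY is not set; add it to your .env file")
```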

## MetaGPT Version

https://gitee.com/jujimeizuo/meta-gpt-tianji

67 agents/utils/common_llm_api.py Normal file
@@ -0,0 +1,67 @@

from dotenv import load_dotenv

# Load API keys from the project's .env file before anything reads os.environ.
load_dotenv()

import asyncio
import os

import erniebot
from zhipuai import ZhipuAI

from metagpt.logs import logger


class BaiduAPI:
    """Thin async wrapper around the ERNIE Bot chat completion API."""

    def __init__(self):
        pass

    async def _aask(self, prompt, stream=False, model="ernie-4.0", top_p=0.95):
        messages = [{"role": "user", "content": prompt}]
        response = erniebot.ChatCompletion.create(
            model=model, messages=messages, top_p=top_p, stream=stream
        )
        return response.result


class ZhipuAPI:
    """Thin async wrapper around a zhipuai (GLM) chat client."""

    def __init__(self, glm=None):
        if glm is None:
            raise RuntimeError("ZhipuAPI requires a ZhipuAI client instance!")
        self.glm = glm

    async def _aask(self, prompt, stream=False, model="glm-3-turbo", top_p=0.95):
        messages = [{"role": "user", "content": prompt}]
        response = self.glm.chat.completions.create(
            model=model, messages=messages, top_p=top_p, stream=stream
        )
        return response.choices[0].message.content


class LLMAPI:
    """Select an available LLM backend based on the configured API keys."""

    def __init__(self):
        self.llm_api = None

        # Select the API: prefer zhipuai, then fall back to Baidu ERNIE Bot.
        # os.environ.get() lets a missing key fall through instead of raising KeyError.
        if os.environ.get("ZHIPUAI_API_KEY"):
            glm = ZhipuAI(api_key=os.environ["ZHIPUAI_API_KEY"])
            self.llm_api = ZhipuAPI(glm=glm)
        elif os.environ.get("BAIDU_API_KEY"):
            erniebot.api_type = "aistudio"
            erniebot.access_token = os.environ["BAIDU_API_KEY"]
            self.llm_api = BaiduAPI()
        else:
            raise RuntimeError("No api_key found!")

    # TODO: the default value of `model` should depend on the selected api_type,
    # not always be the zhipuai default (a possible approach is sketched after this file).
    async def _aask(self, prompt, stream=False, model="glm-3-turbo", top_p=0.95):
        logger.info("call llm_api, response is below")
        rsp = await self.llm_api._aask(prompt, stream=stream, model=model, top_p=top_p)
        return rsp


if __name__ == "__main__":
    # models = erniebot.Model.list()
    # print("available models", models)

    llm_api = LLMAPI()
    # result = asyncio.run(baidu_api._aask("你好啊"))
    result = asyncio.run(llm_api._aask("你好啊"))
    print("result", result)
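
A possible way to resolve the TODO above about the `model` default, sketched under the assumption that provider detection mirrors the key-selection logic in `LLMAPI.__init__`. The `DEFAULT_MODELS` mapping and `default_model_for` helper are illustrative names, not part of this file:

```python
import os

# Hypothetical per-provider defaults; the model names match the ones already
# used as hard-coded defaults in common_llm_api.py.
DEFAULT_MODELS = {
    "zhipuai": "glm-3-turbo",
    "baidu": "ernie-4.0",
}


def default_model_for() -> str:
    """Pick a default model based on which API key is configured,
    mirroring the provider selection in LLMAPI.__init__."""
    if os.environ.get("ZHIPUAI_API_KEY"):
        return DEFAULT_MODELS["zhipuai"]
    if os.environ.get("BAIDU_API_KEY"):
        return DEFAULT_MODELS["baidu"]
    raise RuntimeError("No api_key found!")
```

`LLMAPI._aask` could then accept `model=None` and fall back to `default_model_for()` when no model is passed explicitly.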