note
- 新增工具:搜索引擎Agent
文章目錄
- note
- 一、搜索引擎Agent
- Reference
一、搜索引擎Agent
import os
from dotenv import load_dotenv

# Load any default .env file first.
load_dotenv()

# Connection settings, filled in while scanning the env file below.
base_url = None
chat_model = None
api_key = None

env_path = "/Users/guomiansheng/Desktop/LLM/llm_app/wow-agent/.env.txt"


def _env_value(raw_line):
    """Return the value part of a KEY=VALUE line, stripped of quotes and whitespace."""
    return raw_line.split('=', 1)[1].strip().strip('"')


# NOTE(review): matching uses substring tests (e.g. "base_url" in line), so any
# line merely containing one of these names is treated as that setting.
with open(env_path, 'r') as file:
    for raw in file:
        raw = raw.strip()
        if "base_url" in raw:
            base_url = _env_value(raw)
        elif "chat_model" in raw:
            chat_model = _env_value(raw)
        elif "ZHIPU_API_KEY" in raw:
            api_key = _env_value(raw)
        elif "BOCHA_API_KEY" in raw:
            BOCHA_API_KEY = _env_value(raw)

# Echo the parsed settings so a misconfigured file is obvious.
print(f"base_url: {base_url}")
print(f"chat_model: {chat_model}")
print(f"ZHIPU_API_KEY: {api_key}")

from openai import OpenAI

# OpenAI-compatible client pointed at the endpoint parsed from the env file.
client = OpenAI(api_key=api_key, base_url=base_url)
print(client)


def get_completion(prompt):
    """Send a single user prompt to the glm-4-flash model and return its reply text."""
    messages = [{"role": "user", "content": prompt}]
    completion = client.chat.completions.create(
        model="glm-4-flash",  # name of the model to call
        messages=messages,
    )
    return completion.choices[0].message.content
from openai import OpenAI
from pydantic import Field # 導入Field,用于Pydantic模型中定義字段的元數據
from llama_index.core.llms import (CustomLLM,CompletionResponse,LLMMetadata,
)
from llama_index.core.embeddings import BaseEmbedding
from llama_index.core.llms.callbacks import llm_completion_callback
from typing import List, Any, Generator# 定義OurLLM類,繼承自CustomLLM基類
class OurLLM(CustomLLM):
    """llama_index CustomLLM wrapper around an OpenAI-compatible chat endpoint."""

    api_key: str = Field(default=api_key)
    base_url: str = Field(default=base_url)
    model_name: str = Field(default=chat_model)
    # Underlying OpenAI client; excluded from pydantic serialization.
    client: OpenAI = Field(default=None, exclude=True)

    def __init__(self, api_key: str, base_url: str, model_name: str = chat_model, **data: Any):
        super().__init__(**data)
        self.api_key = api_key
        self.base_url = base_url
        self.model_name = model_name
        # Build the client from the caller-supplied credentials.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

    @property
    def metadata(self) -> LLMMetadata:
        """Get LLM metadata."""
        return LLMMetadata(model_name=self.model_name)

    @llm_completion_callback()
    def complete(self, prompt: str, **kwargs: Any) -> CompletionResponse:
        """Blocking completion: return the model's full reply as a single response."""
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=[{"role": "user", "content": prompt}],
        )
        if hasattr(response, 'choices') and len(response.choices) > 0:
            return CompletionResponse(text=response.choices[0].message.content)
        raise Exception(f"Unexpected response format: {response}")

    @llm_completion_callback()
    def stream_complete(self, prompt: str, **kwargs: Any) -> Generator[CompletionResponse, None, None]:
        """Streaming completion: yield one CompletionResponse per non-empty chunk."""
        stream = self.client.chat.completions.create(
            model=self.model_name,
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        )
        try:
            for chunk in stream:
                delta = chunk.choices[0].delta
                # Skip keep-alive/empty chunks.
                if not delta.content:
                    continue
                yield CompletionResponse(text=delta.content, delta=delta.content)
        except Exception as e:
            raise Exception(f"Unexpected response format: {e}")


llm = OurLLM(api_key=api_key, base_url=base_url, model_name=chat_model)
# print(llm)
# Quick smoke test: stream an answer to verify the model responds.
for piece in llm.stream_complete("你是誰?"):
    print(piece, end="", flush=True)
# Part 2: the web-search tool
from llama_index.core.tools import FunctionTool
import requests
# 需要先把BOCHA_API_KEY填寫到.env文件中去。
# BOCHA_API_KEY = os.getenv('BOCHA_API_KEY')# 定義Bocha Web Search工具
def bocha_web_search_tool(query: str, count: int = 8) -> str:
    """Run a web search through the Bocha Web Search API.

    Args:
        query: search keywords.
        count: number of search results to return.

    Returns:
        The JSON response body rendered as a string (fed back to the LLM).

    Raises:
        Exception: if the HTTP status is not 200.
        requests.exceptions.Timeout: if the API does not answer within 30s.
    """
    url = 'https://api.bochaai.com/v1/web-search'
    headers = {
        'Authorization': f'Bearer {BOCHA_API_KEY}',  # module-level key parsed from the env file
        'Content-Type': 'application/json'
    }
    data = {
        "query": query,
        "freshness": "noLimit",  # time range: "oneDay", "oneWeek", "oneMonth", "oneYear", "noLimit"
        "summary": True,  # whether to return a long-text summary
        "count": count
    }
    # Fix: a timeout keeps a stalled API call from hanging the agent loop forever.
    response = requests.post(url, headers=headers, json=data, timeout=30)
    if response.status_code == 200:
        # Formatted search-result text handed back to the model;
        # customize processing of Bocha's results here if needed.
        return str(response.json())
    else:
        raise Exception(f"API請求失敗,狀態碼: {response.status_code}, 錯誤信息: {response.text}")


search_tool = FunctionTool.from_defaults(fn=bocha_web_search_tool)
from llama_index.core.agent import ReActAgent
# Build a ReAct agent that can call the Bocha search tool through our custom LLM.
agent = ReActAgent.from_tools([search_tool], llm=llm, verbose=True, max_iterations=10)

# Test case
query = "阿里巴巴2024年的ESG報告主要講了哪些內容?"
prompt = f"請幫我搜索以下內容:{query}"
response = agent.chat(prompt)
print(response)
Reference
[1] https://github.com/datawhalechina/wow-agent
[2] https://www.datawhale.cn/learn/summary/86
[3] https://open.bochaai.com/
[4] https://github.com/run-llama/llama_index/issues/14843
[5] 官方文檔:https://docs.cloud.llamaindex.ai/