errchh
committed on
Commit · 443733b
1 Parent(s): 2949c77
update llm
Browse files
- __pycache__/agent.cpython-312.pyc +0 -0
- __pycache__/prompts.cpython-312.pyc +0 -0
- agent.py +9 -5
- prompts.py +19 -1
__pycache__/agent.cpython-312.pyc
CHANGED
Binary files a/__pycache__/agent.cpython-312.pyc and b/__pycache__/agent.cpython-312.pyc differ

__pycache__/prompts.cpython-312.pyc
CHANGED
Binary files a/__pycache__/prompts.cpython-312.pyc and b/__pycache__/prompts.cpython-312.pyc differ
agent.py
CHANGED
@@ -135,7 +135,7 @@ def search_arxiv(query: str) -> str:
 
 
 # build retriever
-
+bm25_retriever = BM25Retriever.from_documents(docs)
 
 
 # init system message
@@ -148,8 +148,8 @@ tools = [
     multiply,
     divide,
     modulus,
-    search_wiki,
     search_web,
+    search_wiki,
     search_arxiv
 ]
 
@@ -157,10 +157,14 @@ tools = [
 # build graph function
 def build_graph():
     # llm
-    llm =
-    model="
-    temperature=0
+    llm = ChatGoogleGenerativeAI(
+        model="gemini-2.5-flash-preview-04-17",
+        temperature=0
     )
+    # llm = ChatGroq(
+    #     model="meta-llama/llama-4-scout-17b-16e-instruct",
+    #     temperature=0.1,
+    # )
     print(f"DEBUG: llm object = {llm}")
 
     # bind tools to llm
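Taken together, the changed hunks suggest the following shape for this part of agent.py. This is a minimal sketch, not the actual file: the retriever line, the Gemini model name, the temperature, and the debug print come from the diff above, while the imports, the stand-in docs list, the placeholder multiply tool, and the bind_tools call are assumptions about code the diff does not show.

from langchain_core.documents import Document
from langchain_core.tools import tool
from langchain_community.retrievers import BM25Retriever
from langchain_google_genai import ChatGoogleGenerativeAI


@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


# `docs` is assumed to be built earlier in agent.py; a stand-in document is used here.
docs = [Document(page_content="placeholder reference text")]

# build retriever: the line this commit adds
bm25_retriever = BM25Retriever.from_documents(docs)

# agent.py's full list also includes divide, modulus, search_web, search_wiki, search_arxiv
tools = [multiply]


def build_graph():
    # llm: the commit switches the active model from ChatGroq to Gemini
    # (a GOOGLE_API_KEY environment variable is assumed to be set)
    llm = ChatGoogleGenerativeAI(
        model="gemini-2.5-flash-preview-04-17",
        temperature=0,
    )
    print(f"DEBUG: llm object = {llm}")

    # bind tools to llm, as the following comment in the diff indicates
    llm_with_tools = llm.bind_tools(tools)
    return llm_with_tools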
prompts.py
CHANGED
@@ -1 +1,19 @@
-system_prompt = "You are a
+system_prompt = """You are a helpful assistant tasked with answering questions using a set of tools.
+
+Your final response must be presented in one single line, strictly following this exact format:
+FINAL ANSWER: [The concise answer to the user's question]
+
+Include only this line and nothing else in your final output.
+
+Examples of valid final responses:
+FINAL ANSWER: FunkMonk
+FINAL ANSWER: Paris
+FINAL ANSWER: 128
+FINAL ANSWER: 15 January 2001
+
+If the current question is identical to a question you have already answered, you must provide the stored final answer in the required "FINAL ANSWER: [ANSWER]" format. Do not use tools in this case.
+
+If the question is new or different, use your available tools to determine the answer, and then format that answer into the required "FINAL ANSWER: [ANSWER]" format.
+
+Strict adherence to the "FINAL ANSWER: [ANSWER]" format is mandatory for your response.
+"""
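prompts.py only defines the string. A hypothetical sketch of how agent.py might consume it, assuming the SystemMessage pattern hinted at by the "# init system message" comment in the diff above (the actual wiring is not part of this commit):

from langchain_core.messages import HumanMessage, SystemMessage

from prompts import system_prompt

# seed every conversation with the new FINAL ANSWER instructions
sys_msg = SystemMessage(content=system_prompt)
messages = [sys_msg, HumanMessage(content="What is 6 multiplied by 7?")]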