Première version agent
Dérivé TP3
This commit is contained in:
BIN
AgentReact/utils/__pycache__/nodes.cpython-312.pyc
Normal file
BIN
AgentReact/utils/__pycache__/nodes.cpython-312.pyc
Normal file
Binary file not shown.
BIN
AgentReact/utils/__pycache__/state.cpython-312.pyc
Normal file
BIN
AgentReact/utils/__pycache__/state.cpython-312.pyc
Normal file
Binary file not shown.
BIN
AgentReact/utils/__pycache__/tools.cpython-312.pyc
Normal file
BIN
AgentReact/utils/__pycache__/tools.cpython-312.pyc
Normal file
Binary file not shown.
42
AgentReact/utils/nodes.py
Normal file
42
AgentReact/utils/nodes.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from langchain_mistralai import ChatMistralAI
|
||||
from langgraph.graph import MessagesState
|
||||
from langgraph.prebuilt import ToolNode
|
||||
from langchain.chat_models import init_chat_model
|
||||
from langgraph.graph import START, END
|
||||
|
||||
from .tools import getTools
|
||||
|
||||
# Primary LLM (no tools bound here — tools are attached per node via bind_tools)
_LLM_CONFIG = {
    "model": "mistral-large-latest",
    "temperature": 0,   # deterministic answers
    "max_retries": 2,   # retry transient API failures
}
llm = ChatMistralAI(**_LLM_CONFIG)
|
||||
# NODES
def reponse_question(state: MessagesState):
    """Answer the user's question, relying on the available tools when needed.

    Args:
        state: Graph state carrying the conversation messages.

    Returns:
        dict: A ``{"messages": [...]}`` state update holding the model's reply.
    """
    # Bind the tool definitions so the model may emit tool calls.
    tool_aware_model = llm.bind_tools(getTools())

    # Invoke the LLM on the full conversation history.
    reply = tool_aware_model.invoke(state["messages"])
    return {"messages": [reply]}
|
||||
tool_node = ToolNode(tools=getTools()) # Node that executes the tools requested by the LLM
|
||||
|
||||
# Routing function: after reponse_question, go to tool_node when the LLM
# requested a tool call; otherwise end the graph.
def should_continue(state: MessagesState):
    """
    Use in the conditional_edge to route to the ToolNode if the last message
    has tool calls. Otherwise, route to the end.
    """
    # Accept either a bare message list or the usual state mapping.
    if isinstance(state, list):
        last_message = state[-1]
    else:
        messages = state.get("messages", [])
        if not messages:
            raise ValueError(f"No messages found in input state to tool_edge: {state}")
        last_message = messages[-1]

    # Route to the tool node only when the AI message carries tool calls.
    wants_tools = hasattr(last_message, "tool_calls") and len(last_message.tool_calls) > 0
    return "tools" if wants_tools else END
|
||||
13
AgentReact/utils/state.py
Normal file
13
AgentReact/utils/state.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from langgraph.graph import StateGraph, MessagesState
|
||||
|
||||
class AgentState(MessagesState):
    """Agent graph state: just the message history inherited from MessagesState."""


# Backward-compatible alias for the original placeholder class name,
# so existing references elsewhere in the project keep working.
hjgzefvuiyguhzfvihuozdef = AgentState
||||
def getState() -> StateGraph:
    """
    Return a StateGraph ready to be used as the skeleton of a workflow.

    Returns:
        StateGraph: Ready to use, parameterized with the agent's state schema.
    """
    state_schema = hjgzefvuiyguhzfvihuozdef
    return StateGraph(state_schema)
37
AgentReact/utils/tools.py
Normal file
37
AgentReact/utils/tools.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from langchain.tools import tool
|
||||
from tavily import TavilyClient
|
||||
from typing import List
|
||||
|
||||
@tool
def internet_search(query: str) -> dict:
    """
    Search the internet for a piece of information.

    Args:
        query (str): The search term.
    Returns:
        dict: The raw search response.
    """
    client = TavilyClient()
    # NOTE(review): 'model' is forwarded verbatim to Tavily's search API —
    # confirm it is a supported parameter (search_depth/topic are the usual ones).
    return client.search(query, model='auto')
|
||||
|
||||
@tool
def write_file(content: str) -> str:
    """
    Write the given data to a file.

    NOTE(review): currently a stub — it only echoes the content to stdout
    and reports success; no file is actually created. Confirm intent.

    Args:
        content (str): The file content to write.

    Returns:
        str: A status message describing the write result.
    """
    for line in ("==ECRITURE FICHIER==", content):
        print(line)
    return "Fichier écrit"
|
||||
def getTools() -> list:
    """
    Return the list of tools exposed to the agent.

    Returns:
        list: The @tool-decorated callables the LLM may invoke.
    """
    # The original annotation List['Tools'] referenced a type that exists
    # nowhere; a plain list of tool callables is what is actually returned.
    return [internet_search, write_file]
Reference in New Issue
Block a user