Première version agent
Dérivé TP3
This commit is contained in:
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
.venv/
|
||||||
|
.env
|
||||||
BIN
AgentReact/__pycache__/agent.cpython-312.pyc
Normal file
BIN
AgentReact/__pycache__/agent.cpython-312.pyc
Normal file
Binary file not shown.
32
AgentReact/agent.py
Normal file
32
AgentReact/agent.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
from langgraph.graph import START, END
|
||||||
|
from langgraph.graph.state import CompiledStateGraph
|
||||||
|
|
||||||
|
from utils.nodes import reponse_question, tool_node, should_continue
|
||||||
|
from utils.state import getState
|
||||||
|
|
||||||
|
def getGraph() -> CompiledStateGraph:
    """
    Build and return the compiled graph, ready to invoke.

    Returns:
        CompiledStateGraph: the compiled workflow
    """
    # Pre-configured StateGraph (state schema comes from utils.state)
    workflow = getState()

    # --- Nodes ---
    workflow.add_node(reponse_question)
    # ToolNode is an instantiated object rather than a plain function, so its
    # node name must be given explicitly.
    workflow.add_node("tool_node", tool_node)

    # --- Edges ---
    workflow.set_entry_point("reponse_question")
    workflow.add_edge("tool_node", "reponse_question")
    workflow.add_conditional_edges(
        "reponse_question",
        should_continue,
        {"tools": "tool_node", END: END},
    )

    return workflow.compile()
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Render the compiled graph to a PNG for visual inspection.
    compiled = getGraph()
    compiled.get_graph().draw_mermaid_png(output_file_path="agent.png")
|
||||||
10
AgentReact/start.py
Normal file
10
AgentReact/start.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from dotenv import load_dotenv

# Load the .env file BEFORE importing the agent: importing `agent` pulls in
# utils.nodes, which instantiates the Mistral LLM at module level and needs
# its credentials available in the environment.
load_dotenv()

from langchain.messages import HumanMessage, SystemMessage, AIMessage, ToolMessage

from agent import getGraph


def main() -> None:
    """Run the agent once on a sample question and pretty-print the exchange."""
    out_state = getGraph().invoke({'messages': [HumanMessage("What's the price for bitcoin ?")]})
    for message in out_state['messages']:
        message.pretty_print()


# Guard the entry point so importing this module no longer triggers an LLM call.
if __name__ == "__main__":
    main()
|
||||||
BIN
AgentReact/utils/__pycache__/nodes.cpython-312.pyc
Normal file
BIN
AgentReact/utils/__pycache__/nodes.cpython-312.pyc
Normal file
Binary file not shown.
BIN
AgentReact/utils/__pycache__/state.cpython-312.pyc
Normal file
BIN
AgentReact/utils/__pycache__/state.cpython-312.pyc
Normal file
Binary file not shown.
BIN
AgentReact/utils/__pycache__/tools.cpython-312.pyc
Normal file
BIN
AgentReact/utils/__pycache__/tools.cpython-312.pyc
Normal file
Binary file not shown.
42
AgentReact/utils/nodes.py
Normal file
42
AgentReact/utils/nodes.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
from langchain_mistralai import ChatMistralAI
|
||||||
|
from langgraph.graph import MessagesState
|
||||||
|
from langgraph.prebuilt import ToolNode
|
||||||
|
from langchain.chat_models import init_chat_model
|
||||||
|
from langgraph.graph import START, END
|
||||||
|
|
||||||
|
from .tools import getTools
|
||||||
|
|
||||||
|
# Main LLM — built WITHOUT tools here; reponse_question binds the tools
# per-call via llm.bind_tools(...).
llm = ChatMistralAI(
    model="mistral-large-latest",
    temperature=0,  # deterministic responses
    max_retries=2,
)
|
||||||
|
|
||||||
|
# NODES
|
||||||
|
def reponse_question(state: MessagesState):
    """Answer the user's question, relying on the available tools when needed."""
    # Attach the tools to the base LLM, then run it on the conversation so far.
    tool_llm = llm.bind_tools(getTools())
    answer = tool_llm.invoke(state["messages"])
    return {"messages": [answer]}
|
||||||
|
|
||||||
|
tool_node = ToolNode(tools=getTools())  # node that handles the tools (prebuilt executor)
|
||||||
|
|
||||||
|
# fonction de routage : Après reponse_question, si le LLM veut appeler un outil, on va au tool_node, sinon on termine
|
||||||
|
def should_continue(state: MessagesState):
    """
    Routing function for the conditional edge: send the flow to the ToolNode
    when the last message carries tool calls, otherwise end the graph.
    """
    # The state may arrive either as a raw message list or as a dict with a
    # "messages" key; pick the most recent message in both cases.
    if isinstance(state, list):
        ai_message = state[-1]
    else:
        messages = state.get("messages", [])
        if not messages:
            raise ValueError(f"No messages found in input state to tool_edge: {state}")
        ai_message = messages[-1]

    # Route to the tool node only when the LLM actually requested tools.
    if hasattr(ai_message, "tool_calls") and len(ai_message.tool_calls) > 0:
        return "tools"
    return END
|
||||||
13
AgentReact/utils/state.py
Normal file
13
AgentReact/utils/state.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from langgraph.graph import StateGraph, MessagesState
|
||||||
|
|
||||||
|
class AgentState(MessagesState):
    """Graph state schema: the standard LangGraph message list, no extra fields."""
    pass


# Backward-compatible alias for the original placeholder class name
# (the author's comment admitted it was an un-nameable throwaway).
hjgzefvuiyguhzfvihuozdef = AgentState


def getState() -> StateGraph:
    """
    Return a StateGraph ready to be used to build a workflow.

    Returns:
        StateGraph: ready to use
    """
    return StateGraph(AgentState)
|
||||||
37
AgentReact/utils/tools.py
Normal file
37
AgentReact/utils/tools.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
from langchain.tools import tool
|
||||||
|
from tavily import TavilyClient
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
@tool
def internet_search(query: str) -> dict:
    """
    Search the internet for a piece of information.

    Args:
        query (str): Search term

    Returns:
        dict: Raw search response
    """
    # NOTE(review): the `model='auto'` kwarg is forwarded as-is to the Tavily
    # client — confirm it is a supported search parameter.
    client = TavilyClient()
    return client.search(query, model='auto')
|
||||||
|
|
||||||
|
|
||||||
|
@tool
def write_file(content: str) -> str:
    """
    Write the given data to a file.

    Note: this is a stub — it only echoes the content to stdout and reports
    success; nothing is written to disk.

    Args:
        content (str): File content to write

    Returns:
        str: Result of the write
    """
    # Single print with newline separator — identical stdout to two prints.
    print("==ECRITURE FICHIER==", content, sep="\n")
    return "Fichier écrit"
|
||||||
|
|
||||||
|
def getTools() -> list:
    """
    Return the list of available tools.

    The original annotation was ``List['Tools']`` — a forward reference to a
    type that is defined nowhere; ``list`` is used instead.

    Returns:
        list: the tool objects to bind to the LLM
    """
    return [internet_search, write_file]
|
||||||
18
requirements.txt
Normal file
18
requirements.txt
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
beautifulsoup4>=4.14.2
|
||||||
|
bs4>=0.0.2
|
||||||
|
einops>=0.8.1
|
||||||
|
jupyterlab>=4.5.0
|
||||||
|
jupyterlab-slideshow>=0.3.4
|
||||||
|
langchain>=1.1.0
|
||||||
|
langchain-chroma>=1.0.0
|
||||||
|
langchain-community>=0.4.1
|
||||||
|
langchain-huggingface>=1.1.0
|
||||||
|
langchain-mcp-adapters>=0.2.1
|
||||||
|
langchain-mistralai>=1.1.0
|
||||||
|
langchain-text-splitters>=1.0.0
|
||||||
|
sentence-transformers>=5.1.2
|
||||||
|
tavily-python>=0.7.14
|
||||||
|
unstructured[all-docs]>=0.18.21
|
||||||
|
ipywidgets
|
||||||
|
peft
|
||||||
|
python-dotenv
|
||||||
Reference in New Issue
Block a user