def getGraph() -> CompiledStateGraph:
    """Build and compile the ReAct agent graph.

    Returns:
        CompiledStateGraph: compiled graph, ready to be invoked.
    """
    workflow = getState()  # fresh StateGraph over the message-based state

    # Nodes: the LLM step and the tool-execution step.
    workflow.add_node(reponse_question)
    # ToolNode is an instance rather than a function, so its name must be given explicitly.
    workflow.add_node("tool_node", tool_node)

    # Edges: execution starts at the LLM node; tool results always loop back
    # to the LLM; after each LLM answer the router either dispatches to the
    # tools or ends the run.
    workflow.add_edge(START, "reponse_question")
    workflow.add_edge("tool_node", "reponse_question")
    routes = {"tools": "tool_node", END: END}
    workflow.add_conditional_edges("reponse_question", should_continue, routes)

    return workflow.compile()
def reponse_question(state: MessagesState):
    """LLM node: answer the question, using the available tools when needed."""
    # Attach the tool schemas to the base LLM so it can emit tool calls,
    # then run it over the full conversation history.
    tool_enabled_llm = llm.bind_tools(getTools())
    answer = tool_enabled_llm.invoke(state["messages"])
    return {"messages": [answer]}
def should_continue(state: MessagesState):
    """
    Conditional-edge router: go to the ToolNode when the last message
    carries tool calls, otherwise finish the graph.
    """
    # The state may arrive either as a bare list of messages or as a dict
    # holding a "messages" key; extract the most recent message either way.
    if isinstance(state, list):
        last_message = state[-1]
    else:
        history = state.get("messages", [])
        if not history:
            raise ValueError(f"No messages found in input state to tool_edge: {state}")
        last_message = history[-1]

    # Route to the tools only when the LLM actually requested at least one call.
    if hasattr(last_message, "tool_calls") and len(last_message.tool_calls) > 0:
        return "tools"
    return END
No newline at end of file diff --git a/agent.png b/agent.png new file mode 100644 index 0000000..89e34a0 Binary files /dev/null and b/agent.png differ diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..78de078 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,18 @@ + beautifulsoup4>=4.14.2 + bs4>=0.0.2 + einops>=0.8.1 + jupyterlab>=4.5.0 + jupyterlab-slideshow>=0.3.4 + langchain>=1.1.0 + langchain-chroma>=1.0.0 + langchain-community>=0.4.1 + langchain-huggingface>=1.1.0 + langchain-mcp-adapters>=0.2.1 + langchain-mistralai>=1.1.0 + langchain-text-splitters>=1.0.0 + sentence-transformers>=5.1.2 + tavily-python>=0.7.14 + unstructured[all-docs]>=0.18.21 + ipywidgets + peft + python-dotenv \ No newline at end of file