File size: 1,201 Bytes
7fb1978
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36

from smolagents import tool

def clean_answer_with_prompt(agent_output: str) -> str:
    """
    Pull the final answer text out of raw agent output.

    If the output contains a 'FINAL ANSWER:' marker, only the text after the
    last occurrence is kept (GAIA scoring expects the bare answer with no
    prefix); otherwise the whole output is used. The result is always
    whitespace-stripped.

    Args:
        agent_output: Raw text produced by the agent.

    Returns:
        The cleaned answer string.
    """
    marker = "FINAL ANSWER:"
    if marker not in agent_output:
        return agent_output.strip()
    # rpartition splits on the LAST occurrence, matching split(...)[-1].
    _, _, tail = agent_output.rpartition(marker)
    return tail.strip()

def build_prompt(question: str, context: str) -> str:
    """
    Assemble the full LLM prompt from a fixed system instruction, the
    provided context, and the user's question.

    Args:
        question: The question the model should answer.
        context: Supporting context text placed before the question.

    Returns:
        The complete prompt string ending in 'Answer:' so the model
        continues with the answer directly.
    """
    instruction = "".join(
        [
            "You are an intelligent assistant helping answer complex real-world questions. ",
            "Use the provided context to reason and provide a concise factual answer. ",
            "Only answer what is asked. Do not include 'FINAL ANSWER:' or extra explanation.\n\n",
        ]
    )
    pieces = [instruction, "Context:\n", context, "\n\nQuestion: ", question, "\nAnswer:"]
    return "".join(pieces)

@tool
def greeting_tool(name: str) -> str:
    """
    Produces a personalized welcome message for a gala guest.

    Args:
        name: Name of the guest

    Returns:
        A friendly greeting message.
    """
    greeting = f"Welcome to the gala, {name}! We're honored to have you with us."
    return greeting