# NOTE: this file previously began with web-scrape artifacts from the Hugging
# Face Space page (status badges, file size, commit-hash row, line-number
# gutter). They were not Python and have been removed so the module parses.
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
from pydantic import BaseModel
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
import os
import base64
from Gradio_UI import GradioUI
# --- Langfuse / OpenTelemetry tracing bootstrap -------------------------------
# Credentials come from the environment (unset vars yield None, producing a
# "None:None" auth header — traces will simply be rejected, the app still runs).
LANGFUSE_PUBLIC_KEY = os.getenv("LANGFUSE_PUBLIC_KEY")
LANGFUSE_SECRET_KEY = os.getenv("LANGFUSE_SECRET_KEY")
# Langfuse expects HTTP Basic auth: base64("public_key:secret_key").
LANGFUSE_AUTH=base64.b64encode(f"{LANGFUSE_PUBLIC_KEY}:{LANGFUSE_SECRET_KEY}".encode()).decode()
#os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://cloud.langfuse.com/api/public/otel" # EU data region
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://us.cloud.langfuse.com/api/public/otel" # US data region
os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"
# These imports are intentionally placed AFTER the env vars above: the OTLP
# exporter reads OTEL_EXPORTER_OTLP_* from the environment when instantiated.
from opentelemetry.sdk.trace import TracerProvider
from openinference.instrumentation.smolagents import SmolagentsInstrumentor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
# SimpleSpanProcessor exports each span synchronously — fine for a demo Space,
# but a BatchSpanProcessor would be preferred under real load.
trace_provider = TracerProvider()
trace_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
# Instrument all smolagents activity so agent steps show up as traces.
SmolagentsInstrumentor().instrument(tracer_provider=trace_provider)
# Board-game API configuration; both values come from the environment and are
# None if unset (requests would then be sent to "None/games" — TODO confirm the
# Space always defines these secrets).
GAME_API_KEY = os.getenv("GAME_API_KEY")
GAME_API_BASE_URL = os.getenv("GAME_API_BASE_URL")
class Game_Response(BaseModel):
    """One board game as returned by the games API (see boardgame_lookup_tool)."""
    name: str
    description: str
    year_published: int
    min_players: int   # minimum supported player count
    max_players: int   # maximum supported player count
    users_rated: int   # number of user ratings behind the averages below
    average_rating: float          # raw mean rating (API field "average")
    bayes_adjusted_average: float  # Bayesian-smoothed rating (API field "bayes_average")
    complexity: float              # game weight/complexity (API field "weight")
@tool
def boardgame_lookup_tool(q:str)-> list[Game_Response]:
    """A tool that fetches information about board games
    Args:
        q: a search term representing part or all of a board game's name or description, prefixed with either "name:" or "description:"
    """
    # BUG FIX: the original passed an undefined `search_type` to the API
    # (NameError on every call). Derive it from the documented
    # "name:"/"description:" prefix of `q`; an unprefixed term falls back to a
    # name search and is sent verbatim.
    search_type, sep, term = q.partition(":")
    if not sep or search_type not in ("name", "description"):
        search_type, term = "name", q
    url = f'{GAME_API_BASE_URL}/games'
    headers = {'x-api-key': f'{GAME_API_KEY}'}
    response = requests.get(url, headers=headers, params={'q': term, 'search_type': search_type})
    # Fail loudly on HTTP errors instead of a confusing JSON decode failure.
    response.raise_for_status()
    api_data_list = response.json()
    # Map API field names onto the Game_Response model (note the renames:
    # average -> average_rating, bayes_average -> bayes_adjusted_average,
    # weight -> complexity).
    return [Game_Response(name = api_data['name'],
                          description = api_data['description'],
                          year_published = api_data['year_published'],
                          min_players = api_data['min_players'],
                          max_players = api_data['max_players'],
                          users_rated = api_data['users_rated'],
                          average_rating = api_data['average'],
                          bayes_adjusted_average = api_data['bayes_average'],
                          complexity = api_data['weight']) for api_data in api_data_list]
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Resolve the IANA zone name, then format "now" in that zone.
        zone = pytz.timezone(timezone)
        formatted = datetime.datetime.now(zone).strftime("%Y-%m-%d %H:%M:%S")
    except Exception as error:
        # Invalid zone names (and any other failure) become a readable message
        # for the agent rather than an exception.
        return f"Error fetching time for timezone '{timezone}': {str(error)}"
    return f"The current local time in {timezone} is: {formatted}"
# --- Agent assembly and launch ------------------------------------------------
# FinalAnswerTool lets the agent terminate with an explicit answer.
final_answer = FinalAnswerTool()
# If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
max_tokens=2096,
temperature=0.5,
#model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
custom_role_conversions=None,
)
# Import tool from Hub
#image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
# System/planning prompt templates are kept alongside the app in prompts.yaml.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)
# NOTE(review): get_current_time_in_timezone is defined above but not
# registered in `tools` — presumably intentional, verify.
agent = CodeAgent(
    model=model,
    tools=[final_answer,boardgame_lookup_tool],
    max_steps=6,            # hard cap on reasoning/action iterations
    verbosity_level=1,
    grammar=None,
    planning_interval=None, # no periodic re-planning step
    name=None,
    description=None,
    prompt_templates=prompt_templates
)
# Start the Gradio web UI (blocks until the server stops).
GradioUI(agent).launch()