# Importing necessary libraries and components
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool  # smolagents is a framework for creating AI agents
import datetime  # For working with dates and times
import requests  # For making HTTP requests
import pytz  # For handling timezones
import yaml  # For parsing YAML configuration files
from tools.final_answer import FinalAnswerTool  # Custom tool for providing final answers

# Importing the user interface component
from Gradio_UI import GradioUI  # Gradio is a library for creating web interfaces for ML models


# Example tool definition using the @tool decorator
# Tools are functions that the agent can use to interact with the world
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:  # The function signature defines the inputs and output type
    # This is a placeholder tool - you can replace it with your own functionality
    """A tool that does nothing yet
    Args:
        arg1: the first argument
        arg2: the second argument
    """
    return "What magic will you build ?"  # This is just a placeholder return value


# A more useful tool that gets the current time in a specified timezone
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object from the string parameter
        tz = pytz.timezone(timezone)
        # Get current time in that timezone and format it as a readable string
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        # Return a nicely formatted message with the time information
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        # Error handling: return a helpful message if something goes wrong
        return f"Error fetching time for timezone '{timezone}': {str(e)}"


@tool
def calculator(operation: str, a: float, b: float) -> float:
    """A simple calculator that can perform basic operations.
    Args:
        operation: The math operation to perform ('add', 'subtract', 'multiply', 'divide').
        a: First number.
        b: Second number.
    Returns:
        The numeric result of the operation.
    """
    try:
        if operation.lower() == 'add':
            return a + b
        elif operation.lower() == 'subtract':
            return a - b
        elif operation.lower() == 'multiply':
            return a * b
        elif operation.lower() == 'divide':
            if b == 0:
                raise ValueError("Cannot divide by zero.")
            return a / b
        else:
            raise ValueError(f"Unknown operation '{operation}'. Please use 'add', 'subtract', 'multiply', or 'divide'.")
    except Exception as e:
        raise ValueError(f"Calculation error: {str(e)}")


@tool
def formatted_calculator(operation: str, a: float, b: float) -> str:
    """A calculator that returns a formatted string of the operation and result.
    Args:
        operation: The math operation to perform ('add', 'subtract', 'multiply', 'divide').
        a: First number.
        b: Second number.
""" try: result = calculator(operation, a, b) if operation.lower() == 'add': return f"{a} + {b} = {result}" elif operation.lower() == 'subtract': return f"{a} - {b} = {result}" elif operation.lower() == 'multiply': return f"{a} * {b} = {result}" elif operation.lower() == 'divide': return f"{a} / {b} = {result}" else: return f"Result: {result}" except ValueError as e: return str(e) # Initialize the final answer tool that allows the agent to provide conclusive responses final_answer = FinalAnswerTool() # Note about model availability # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder: # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud' # Initialize the language model that powers the agent model = HfApiModel( max_tokens=2096, # Maximum number of tokens (words/subwords) the model can generate temperature=0.5, # Controls randomness: lower = more deterministic, higher = more creative model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud', # Alternative endpoint custom_role_conversions=None, # No custom role definitions needed ) # Load a tool from Hugging Face Hub that can generate images from text descriptions image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True) # Load prompt templates from a YAML file # Prompt templates guide how the agent responds to different situations with open("prompts.yaml", 'r') as stream: # Open the file for reading prompt_templates = yaml.safe_load(stream) # Parse the YAML content # Khởi tạo Agent với các công cụ và cấu hình cần thiết agent = CodeAgent( model=model, # Mô hình ngôn ngữ lớn (LLM) sẽ cung cấp trí tuệ cho agent tools=[final_answer, calculator, get_current_time_in_timezone], # Danh sách các công cụ mà agent có thể sử dụng: công cụ trả lời cuối cùng, máy tính cơ bản và lấy thời gian theo múi giờ max_steps=6, # Giới hạn số lượng bước suy luận tối đa - ngăn agent chạy quá lâu verbosity_level=1, # Mức độ chi tiết của output: 0=silent, 1=normal, 2=detailed (hiển thị quá trình suy nghĩ) grammar=None, # Không sử dụng grammar đặc biệt nào để giới hạn output planning_interval=None, # Không sử dụng lập kế hoạch định kỳ (mặc định) name="Calculator Agent", # Tên của agent - "Calculator Agent" giúp xác định chức năng chính là máy tính description=None, # Không cung cấp mô tả đặc biệt (có thể thêm mô tả về khả năng của agent) prompt_templates=prompt_templates # Sử dụng các template được định nghĩa trong file prompts.yaml ) # Launch the Gradio user interface with our configured agent # This creates a web interface where users can interact with the agent GradioUI(agent).launch()