diff --git a/agent-py-bot/agents/runner.py b/agent-py-bot/agents/runner.py
index d165923..d6fb3c8 100644
--- a/agent-py-bot/agents/runner.py
+++ b/agent-py-bot/agents/runner.py
@@ -10,4 +10,8 @@ def execute_python_code(code_block):
     except Exception as e:
         return f"Execution error: {str(e)}"
-
\ No newline at end of file
+def execute_trading_action(action):
+    # Placeholder for executing trading actions
+    # This could be an API call to a trading platform
+    print(f"Executing trading action: {action}")
+
diff --git a/agent-py-bot/agents/webagent.py b/agent-py-bot/agents/webagent.py
index 1024e31..59c1078 100644
--- a/agent-py-bot/agents/webagent.py
+++ b/agent-py-bot/agents/webagent.py
@@ -46,9 +46,15 @@ def parse_rss_feed(feed_url):
     articles = [{'title': entry.title, 'link': entry.link} for entry in feed.entries]
     return articles
 
+import yfinance as yf
 from selenium import webdriver
 from selenium.webdriver.chrome.options import Options
 
+def fetch_stock_data(ticker, interval='1d', period='1mo'):
+    stock = yf.Ticker(ticker)
+    hist = stock.history(interval=interval, period=period)
+    return hist
+
 def search_google_news(topic):
     options = Options()
     options.headless = True
@@ -148,6 +154,17 @@ def get_news_api_results(query, api_key, from_param):
     except Exception as e:
         return f"API Request Error: {e}"
 
+def search_tavily(topic, api_key):
+    url = f"https://app.tavily.com/api/search?q={topic}"
+    headers = {
+        "Authorization": f"Bearer {api_key}"
+    }
+    response = requests.get(url, headers=headers)
+    if response.status_code == 200:
+        return response.json()
+    else:
+        return {"error": response.text}
+
 def search_news(topic):
     # DuckDuckGo Results
     duck_results = search_duckduckgo(topic)
@@ -205,4 +222,7 @@ def summarize_data(data):
 def run_web_agent(topic, folder):
     print(f"[{datetime.now()}] Running web agent for topic: {topic}")
     news_data = search_news(topic)
-    return news_data
\ No newline at end of file
+    tavily_api_key = "YOUR_TAVILY_API_KEY"
+    tavily_results = search_tavily(topic, tavily_api_key)
+    news_data["tavily"] = tavily_results
+    return news_data
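
A minimal usage sketch of the helpers this diff introduces, assuming the agent-py-bot directory is on PYTHONPATH so agents.webagent and agents.runner import cleanly, and assuming the Tavily key is read from a TAVILY_API_KEY environment variable rather than the hardcoded "YOUR_TAVILY_API_KEY" placeholder; the ticker, topic, and action strings below are illustrative only, not part of the diff:

import os

from agents.runner import execute_trading_action
from agents.webagent import fetch_stock_data, search_tavily

def demo():
    # One month of daily candles via the new yfinance helper (returns a pandas DataFrame).
    prices = fetch_stock_data("NVDA", interval="1d", period="1mo")
    print(prices.tail())

    # Direct Tavily query; search_tavily returns parsed JSON on a 200 response,
    # otherwise an {"error": ...} dict.
    tavily_key = os.environ.get("TAVILY_API_KEY", "")
    print(search_tavily("semiconductor earnings", tavily_key))

    # Placeholder trading hook from runner.py; currently it only logs the action.
    execute_trading_action("BUY NVDA 10")

if __name__ == "__main__":
    demo()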