doing summary with AI
parent 3d114d1a76
commit 17d7316ef9
4  .gitignore (vendored)
@@ -2,4 +2,6 @@ node_modules/
 package-lock.json
 rec/*
 */__pycache__/*
 __pycache__
+agent-py-bot/scrape/raw/summary_log.txt
+agent-py-bot/scrape/raw/*
@@ -11,6 +11,7 @@ from selenium import webdriver
 from selenium.webdriver.chrome.options import Options
 from io import BytesIO
 from PIL import Image
+from datetime import datetime, timedelta

 # Apply nest_asyncio
 nest_asyncio.apply()
@@ -78,13 +79,18 @@ import re
 from agents.runner import execute_python_code

 #https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion
-async def query_llm(user_message):
+async def query_llm(user_message, model=None):
     """Query the LLM with the user's message."""
+    # use the model if provided, otherwise use the default llama2
+    if model is None:
+        model = "llama2:latest"
+
     data = {
-        "model": "llama2:latest",
+        "model": model,
         "messages": [{"role": "user", "content": user_message}],
         "stream": False
     }

     response = requests.post(LLM_ENDPOINT, json=data)
     if response.status_code == 200:
         response_data = response.json()
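For reference, a minimal standalone sketch of the new call shape: query_llm now takes an optional model and falls back to llama2:latest. The endpoint URL and the message.content parsing below are assumptions for illustration, following the Ollama chat API linked above; the bot's real LLM_ENDPOINT and response handling live elsewhere in the file.

import requests

LLM_ENDPOINT = "http://localhost:11434/api/chat"  # assumed default Ollama chat endpoint

def query_llm_sketch(user_message, model=None):
    """Blocking sketch of query_llm: use the given model, else default to llama2."""
    if model is None:
        model = "llama2:latest"
    data = {
        "model": model,
        "messages": [{"role": "user", "content": user_message}],
        "stream": False,
    }
    response = requests.post(LLM_ENDPOINT, json=data)
    if response.status_code == 200:
        # Ollama's chat endpoint returns the reply under message -> content
        return response.json()["message"]["content"]
    return None

# query_llm_sketch("hello")                         -> uses llama2:latest
# query_llm_sketch("summarize this", "openhermes")  -> uses openhermes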
@@ -99,15 +105,15 @@ async def query_llm(user_message):

         # Find and execute all code blocks
         code_blocks = re.findall(r"```(.*?)```", content, re.DOTALL)
-        for code in code_blocks:
-            execution_result = execute_python_code(code.strip())
-            if APPEND_RESULTS:
-                # Append the result after the code block
-                content = content.replace(f"```{code}```", f"```{code}```\n```{execution_result}```")
-            else:
-                # Replace the code block with its result
-                content = content.replace(f"```{code}```", f"```{execution_result}```")
+        if code_blocks:
+            for code in code_blocks:
+                execution_result = execute_python_code(code.strip())
+                if APPEND_RESULTS:
+                    # Append the result after the code block
+                    content = content.replace(f"```{code}```", f"```{code}```\n```{execution_result}```")
+                else:
+                    # Replace the code block with its result
+                    content = content.replace(f"```{code}```", f"```{execution_result}```")

         return content
     else:
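A self-contained sketch of the extract-execute-substitute pattern in the hunk above, with execute_python_code stubbed out (the real implementation comes from agents.runner) and APPEND_RESULTS assumed to be a module-level flag:

import contextlib
import io
import re

APPEND_RESULTS = True  # assumed module-level flag

def execute_python_code(code):
    # Stand-in for agents.runner.execute_python_code: run the snippet, capture stdout.
    buffer = io.StringIO()
    with contextlib.redirect_stdout(buffer):
        exec(code, {})
    return buffer.getvalue().strip()

content = "Here is code:\n```print(2 + 2)```\nDone."
code_blocks = re.findall(r"```(.*?)```", content, re.DOTALL)
if code_blocks:
    for code in code_blocks:
        execution_result = execute_python_code(code.strip())
        if APPEND_RESULTS:
            # Append the result after the code block
            content = content.replace(f"```{code}```", f"```{code}```\n```{execution_result}```")
        else:
            # Replace the code block with its result
            content = content.replace(f"```{code}```", f"```{execution_result}```")
print(content)  # the original block is now followed by ```4```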
@@ -193,50 +199,35 @@ import time
 from agents.webagent import run_web_agent, save_data

 async def run_web_agent_and_process_result(topic, folder):
-    print(f"Running web agent for topic: {topic}")
     news_data = run_web_agent(topic, folder)
-    save_data(news_data, folder)
     # summary = summarize_data(news_data)

     # with open(os.path.join(folder, "summary_log.txt"), 'a') as log_file:
     #     log_file.write(f"{datetime.now()}: {summary}\n")

     # Process the result immediately after obtaining it
-    user_message = f"New data received: {news_data}"
-    query_result = await query_llm(user_message)
+    user_message = f"Summarize these news and make sentiment analysis on each news and one overall: {news_data}"
+    print(f"[{datetime.now()}] Doing sentiment analysis with AI model.")
+    start = time.time()
+    query_result = await query_llm(user_message, "openhermes")
+    print(f"[{datetime.now()}] AI call returned in {time.time() - start} seconds.")
+    news_data["summary"] = query_result
+    save_data(news_data, folder)
+
+    with open(os.path.join(folder, "summary_log.txt"), 'a') as log_file:
+        log_file.write(f"{datetime.now()}: {query_result}\n")

     # Process the query_result as needed

 async def async_main():
-    # Assuming this is your asynchronous main function with its full details
-    loop = asyncio.get_event_loop()
-    if loop.is_running():
-        loop.create_task(main())
-    else:
-        await main()
-
-def sync_main():
-    # Synchronous part for scheduling
     topic = "tesla news"
-    interval = 8 # in minutes
+    interval = 1 # in hours
     folder = "agent-py-bot/scrape/raw"

-    # schedule.every(interval).minutes.do(run_web_agent_and_process_result, topic=topic, folder=folder)
-    schedule.every(interval).hours.do(run_web_agent_and_process_result, topic=topic, folder=folder)
-    # Run once at the start
-    news_json = await run_web_agent_and_process_result(topic=topic, folder=folder)
-
     while True:
-        schedule.run_pending()
-        time.sleep(1)
+        await run_web_agent_and_process_result(topic=topic, folder=folder)
+        await asyncio.sleep(interval * 60) # Convert hours to seconds

 if __name__ == '__main__':
-    loop = asyncio.get_event_loop()
-
-    # Run the asynchronous part
-    if loop.is_running():
-        loop.create_task(async_main())
-    else:
-        loop.run_until_complete(async_main())
-
-    # Run the synchronous part
-    sync_main()
+    asyncio.run(async_main())
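One caveat with the rewritten loop: the interval is now documented in hours, but asyncio.sleep(interval * 60) sleeps 60 seconds per unit (one minute for interval = 1), not an hour; converting hours to seconds needs a factor of 3600. A minimal sketch of the polling loop with the conversion spelled out, reusing the names from the hunk above:

import asyncio
from datetime import datetime

async def poll_loop(task, interval_hours=1, **kwargs):
    """Run the task, then sleep the full interval; hours -> seconds is * 3600."""
    while True:
        print(f"[{datetime.now()}] running scheduled task")
        await task(**kwargs)
        await asyncio.sleep(interval_hours * 3600)

# In the bot this would be invoked roughly as:
# asyncio.run(poll_loop(run_web_agent_and_process_result,
#                       interval_hours=1,
#                       topic="tesla news",
#                       folder="agent-py-bot/scrape/raw"))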
@@ -203,7 +203,7 @@ def summarize_data(data):
     return summary

 def run_web_agent(topic, folder):
-    print(f"Running web agent for topic: {topic}")
+    print(f"[{datetime.now()}] Running web agent for topic: {topic}")
     news_data = search_news(topic)
     # save_data(news_data, folder)
     # summary = summarize_data(news_data)
@@ -1,11 +0,0 @@
-2023-12-23 01:18:42.922812: Summarized information
-2023-12-25 17:02:01.477567: Summarized information
-2024-01-08 13:12:04.190959: Summarized information
-2024-01-08 13:13:03.437567: Summarized information
-2024-01-08 13:14:04.749784: Summarized information
-2024-01-08 13:15:06.100403: Summarized information
-2024-01-08 13:16:07.387491: Summarized information
-2024-01-08 13:17:09.016139: Summarized information
-2024-01-08 13:18:10.384559: Summarized information
-2024-01-08 13:19:12.129203: Summarized information
-2024-01-08 13:20:13.569597: Summarized information