Mirror of https://github.com/aljazceru/Auto-GPT.git (synced 2025-12-22 16:34:25 +01:00)
Improve performance and remove code duplication
@@ -5,14 +5,17 @@ from llm_utils import create_chat_completion
 cfg = Config()
 
 
-def scrape_text(url):
+def get_website_content(url):
     response = requests.get(url, headers=cfg.user_agent_header)
 
     # Check if the response contains an HTTP error
     if response.status_code >= 400:
         return "Error: HTTP " + str(response.status_code) + " error"
+    return response
 
-    soup = BeautifulSoup(response.text, "html.parser")
+
+def scrape_text(website_content):
+    soup = BeautifulSoup(website_content.text, "html.parser")
 
     for script in soup(["script", "style"]):
         script.extract()
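One subtlety in the hunk above: get_website_content() returns a requests.Response on success but a plain error string when the status code is 400 or higher, while the new scrape_text() immediately dereferences website_content.text. A caller that wants to guard against that mismatch could check the type first; a minimal sketch, where handle_error() is a hypothetical placeholder and not part of this commit:

    # Guard against the error-string return value of get_website_content()
    # before handing it to scrape_text() / scrape_links().
    content = get_website_content(url)
    if isinstance(content, str):        # "Error: HTTP <code> error"
        handle_error(content)           # hypothetical error handler
    else:
        text = scrape_text(content)     # content is a requests.Response here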
@@ -39,14 +42,8 @@ def format_hyperlinks(hyperlinks):
     return formatted_links
 
 
-def scrape_links(url):
-    response = requests.get(url, headers=cfg.user_agent_header)
-
-    # Check if the response contains an HTTP error
-    if response.status_code >= 400:
-        return "error"
-
-    soup = BeautifulSoup(response.text, "html.parser")
+def scrape_links(website_content):
+    soup = BeautifulSoup(website_content.text, "html.parser")
 
     for script in soup(["script", "style"]):
         script.extract()
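Taken together, the two hunks above make the HTTP request happen exactly once: get_website_content() fetches the page, and scrape_text() / scrape_links() now only parse the Response they are given instead of each calling requests.get() themselves. A minimal sketch of the resulting fetch-once, parse-twice pattern (the summarize_page() wrapper is made up for illustration; the browse module name matches how the later hunks reference it):

    import browse  # the helper module modified above

    def summarize_page(url):
        # One network round trip instead of one per helper call.
        website_content = browse.get_website_content(url)
        text = browse.scrape_text(website_content)     # parses website_content.text
        links = browse.scrape_links(website_content)   # reuses the same Response
        return text, links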
@@ -163,8 +163,9 @@ def google_official_search(query, num_results=8):
     return search_results_links
 
 def browse_website(url, question):
-    summary = get_text_summary(url, question)
-    links = get_hyperlinks(url)
+    website_content = browse.get_website_content(url)
+    summary = get_text_summary(website_content, question)
+    links = get_hyperlinks(website_content)
 
     # Limit links to 5
     if len(links) > 5:
@@ -175,14 +176,14 @@ def browse_website(url, question):
     return result
 
 
-def get_text_summary(url, question):
-    text = browse.scrape_text(url)
+def get_text_summary(website_content, question):
+    text = browse.scrape_text(website_content)
     summary = browse.summarize_text(text, question)
     return """ "Result" : """ + summary
 
 
-def get_hyperlinks(url):
-    link_list = browse.scrape_links(url)
+def get_hyperlinks(website_content):
+    link_list = browse.scrape_links(website_content)
     return link_list
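With the last two hunks, browse_website() downloads the page once via browse.get_website_content() and threads the same Response through get_text_summary() and get_hyperlinks(), so a single call now costs one HTTP request instead of two. A minimal usage sketch, assuming the surrounding module (not named in this view) is importable as commands and that the Config user agent and the summarization backend are configured:

    from commands import browse_website  # module name assumed, not shown in this diff

    # One requests.get() for the whole call; the summary and the link list
    # are both derived from the same Response object.
    result = browse_website("https://example.com", "What is this page about?")
    print(result)  # combines the page summary and up to 5 links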