Merge pull request #1 from Torantulino/master

Merging up to the latest commit on the Torantulino/Auto-GPT master branch
This commit is contained in:
EricFedrowisch
2023-04-04 10:33:38 -05:00
committed by GitHub
10 changed files with 82 additions and 83 deletions

View File

@@ -9,7 +9,7 @@ import ai_functions as ai
from file_operations import read_file, write_to_file, append_to_file, delete_file
from execute_code import execute_python_file
from json_parser import fix_and_parse_json
from googlesearch import search
from duckduckgo_search import ddg
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
@@ -72,7 +72,7 @@ def execute_command(command_name, arguments):
elif command_name == "delete_agent":
return delete_agent(arguments["key"])
elif command_name == "get_text_summary":
return get_text_summary(arguments["url"])
return get_text_summary(arguments["url"], arguments["question"])
elif command_name == "get_hyperlinks":
return get_hyperlinks(arguments["url"])
elif command_name == "read_file":
@@ -84,7 +84,7 @@ def execute_command(command_name, arguments):
elif command_name == "delete_file":
return delete_file(arguments["file"])
elif command_name == "browse_website":
return browse_website(arguments["url"])
return browse_website(arguments["url"], arguments["question"])
# TODO: Change these to take in a file rather than pasted code, if
# non-file is given, return instructions "Input should be a python
# filepath, write your code to file and try again"
@@ -112,7 +112,7 @@ def get_datetime():
def google_search(query, num_results=8):
search_results = []
for j in search(query, num_results=num_results):
for j in ddg(query, max_results=num_results):
search_results.append(j)
return json.dumps(search_results, ensure_ascii=False, indent=4)
@@ -152,8 +152,8 @@ def google_official_search(query, num_results=8):
# Return the list of search result URLs
return search_results_links
def browse_website(url):
summary = get_text_summary(url)
def browse_website(url, question):
summary = get_text_summary(url, question)
links = get_hyperlinks(url)
# Limit links to 5
@@ -165,9 +165,9 @@ def browse_website(url):
return result
def get_text_summary(url):
def get_text_summary(url, question):
text = browse.scrape_text(url)
summary = browse.summarize_text(text)
summary = browse.summarize_text(text, question)
return """ "Result" : """ + summary