Merge branch 'master' into add_website_memory

Authored by Maiko Bossuyt on 2023-04-12 23:18:09 +02:00, committed by GitHub
32 changed files with 787 additions and 163 deletions


@@ -3,6 +3,7 @@ from bs4 import BeautifulSoup
 from memory import get_memory
 from config import Config
 from llm_utils import create_chat_completion
+from urllib.parse import urlparse, urljoin
 
 cfg = Config()
 memory = get_memory(cfg)
@@ -10,6 +11,27 @@ memory = get_memory(cfg)
 session = requests.Session()
 session.headers.update({'User-Agent': cfg.user_agent})
 
+# Function to check if the URL is valid
+def is_valid_url(url):
+    try:
+        result = urlparse(url)
+        return all([result.scheme, result.netloc])
+    except ValueError:
+        return False
+
+# Function to sanitize the URL
+def sanitize_url(url):
+    return urljoin(url, urlparse(url).path)
+
+# Function to make a request with a specified timeout and handle exceptions
+def make_request(url, timeout=10):
+    try:
+        response = session.get(url, timeout=timeout)
+        response.raise_for_status()
+        return response
+    except requests.exceptions.RequestException as e:
+        return "Error: " + str(e)
+
 # Define and check for local file address prefixes
 def check_local_file_access(url):
     local_prefixes = ['file:///', 'file://localhost', 'http://localhost', 'https://localhost']
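
For context (not part of the diff): a minimal standalone sketch of how the two new URL helpers behave. The function bodies mirror the hunk above; the example URLs are illustrative.

from urllib.parse import urlparse, urljoin

def is_valid_url(url):
    try:
        result = urlparse(url)
        # Valid only if both a scheme and a network location are present
        return all([result.scheme, result.netloc])
    except ValueError:
        return False

def sanitize_url(url):
    # Rebuild the URL from its parsed path, dropping query string and fragment
    return urljoin(url, urlparse(url).path)

print(is_valid_url("https://example.com/page"))        # True
print(is_valid_url("example.com/page"))                # False: no scheme
print(sanitize_url("https://example.com/a?q=1#frag"))  # https://example.com/a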
@@ -17,22 +39,29 @@ def check_local_file_access(url):
 def scrape_text(url):
     """Scrape text from a webpage"""
-    # Most basic check if the URL is valid:
+    # Basic check if the URL is valid
     if not url.startswith('http'):
         return "Error: Invalid URL"
 
     # Restrict access to local files
     if check_local_file_access(url):
         return "Error: Access to local files is restricted"
 
-    try:
-        response = session.get(url)
-    except requests.exceptions.RequestException as e:
-        return "Error: " + str(e)
-    # Check if the response contains an HTTP error
-    if response.status_code >= 400:
-        return "Error: HTTP " + str(response.status_code) + " error"
+    # Validate the input URL
+    if not is_valid_url(url):
+        # Sanitize the input URL
+        sanitized_url = sanitize_url(url)
+
+        # Make the request with a timeout and handle exceptions
+        response = make_request(sanitized_url)
+
+        if isinstance(response, str):
+            return response
+    else:
+        # Sanitize the input URL
+        sanitized_url = sanitize_url(url)
+        response = session.get(sanitized_url)
 
     soup = BeautifulSoup(response.text, "html.parser")
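
For context (not part of the diff): a minimal sketch of the string-or-Response convention that make_request follows and that the rewritten scrape_text checks with isinstance. It assumes requests is installed; the URL is illustrative.

import requests

session = requests.Session()

def make_request(url, timeout=10):
    try:
        response = session.get(url, timeout=timeout)
        response.raise_for_status()  # raises on 4xx/5xx status codes
        return response
    except requests.exceptions.RequestException as e:
        # Errors are returned as strings rather than raised
        return "Error: " + str(e)

result = make_request("https://example.com")
if isinstance(result, str):
    print(result)              # the error message
else:
    print(result.status_code)  # a requests.Response on success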