feat(search): fix web search

This commit is contained in:
Florian Hönicke
2023-05-20 01:05:09 +02:00
parent b2f1ce4489
commit 16f1f7b6af
4 changed files with 34 additions and 25 deletions

View File

@@ -257,11 +257,12 @@ metas:
print('\nFirst version of the microservice generated. Start iterating on it to make the tests pass...')
def add_missing_imports_post_process_fn(self, content_raw: str):
def add_missing_imports_post_process_fn(self, content_dict: dict):
for indicator, import_statement in INDICATOR_TO_IMPORT_STATEMENT.items():
if indicator in content_raw and import_statement not in content_raw:
content_raw = f'{import_statement}\n{content_raw}'
return content_raw
for file_name, file_content in content_dict.items():
if indicator in file_content and import_statement not in file_content:
content_dict[file_name] = f'{import_statement}\n{file_content}'
return content_dict
@staticmethod

View File

@@ -6,10 +6,10 @@ openai.api_key = os.getenv("OPENAI_API_KEY")
class GPT_3_5_Turbo:
def __init__(self, system: str = ''):
self.system = system
def __init__(self, system_string: str = ''):
self.system = system_string
def __call__(self, prompt: str) -> str:
def __call__(self, prompt_string: str) -> str:
response = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=[{
@@ -17,7 +17,7 @@ class GPT_3_5_Turbo:
"content": self.system
}, {
"role": 'user',
"content": prompt
"content": prompt_string
}]
)
return response.choices[0]['message']['content']

View File

@@ -97,13 +97,13 @@ gpt_35_turbo_usage_string = """If you need to use gpt_3_5_turbo, then use it lik
from .apis import GPT_3_5_Turbo
gpt_3_5_turbo = GPT_3_5_Turbo(
system=\'\'\'
system_string=\'\'\'
You are a tv-reporter who is specialized in C-list celebrities.
When you get asked something like 'Who was having a date with <X>?', then you answer with a json like '{{"dates": ["<Y>", "<Z>"]}}'.
You must not answer something else - only the json.
\'\'\')
generated_string = gpt_3_5_turbo("example user prompt") # prompt is a string; generated_string is a string
generated_string = gpt_3_5_turbo(prompt_string="example user prompt") # prompt_string is the only parameter
```
"""

View File

@@ -28,7 +28,6 @@ def test_generation_level_0(microservice_dir, mock_input_sequence):
assert generator.generate() == 0
@pytest.mark.parametrize('mock_input_sequence', [['y']], indirect=True)
def test_generation_level_1(microservice_dir, mock_input_sequence):
"""
@@ -48,12 +47,13 @@ Example tweet:
But hey, at least SOMEONE's enjoying their lunch. #officelife\'''',
str(microservice_dir),
'gpt-3.5-turbo',
self_healing=False,
# self_healing=False,
)
assert generator.generate() == 0
@pytest.mark.parametrize('mock_input_sequence', [['y', 'https://www.africau.edu/images/default/sample.pdf']], indirect=True)
@pytest.mark.parametrize('mock_input_sequence', [['y', 'https://www.africau.edu/images/default/sample.pdf']],
indirect=True)
def test_generation_level_2(microservice_dir, mock_input_sequence):
"""
Requirements:
@@ -69,11 +69,13 @@ def test_generation_level_2(microservice_dir, mock_input_sequence):
"The input is a PDF and the output the summarized text (50 words).",
str(microservice_dir),
'gpt-3.5-turbo',
self_healing=False,
# self_healing=False,
)
assert generator.generate() == 0
@pytest.mark.parametrize('mock_input_sequence', [['y', 'https://upload.wikimedia.org/wikipedia/commons/4/47/PNG_transparency_demonstration_1.png']], indirect=True)
@pytest.mark.parametrize('mock_input_sequence', [
['y', 'https://upload.wikimedia.org/wikipedia/commons/4/47/PNG_transparency_demonstration_1.png']], indirect=True)
def test_generation_level_2_svg(microservice_dir, mock_input_sequence):
"""
Requirements:
@@ -89,7 +91,7 @@ def test_generation_level_2_svg(microservice_dir, mock_input_sequence):
"Get a png as input and return a vectorized version as svg.",
str(microservice_dir),
'gpt-3.5-turbo',
self_healing=False,
# self_healing=False,
)
assert generator.generate() == 0
@@ -116,10 +118,11 @@ Example input: 'AAPL'
''',
str(microservice_dir),
'gpt-3.5-turbo',
self_healing=False,
# self_healing=False,
)
assert generator.generate() == 0
@pytest.mark.parametrize(
'mock_input_sequence', [
[
@@ -161,10 +164,11 @@ def test_generation_level_4(microservice_dir, mock_input_sequence):
''',
str(microservice_dir),
'gpt-4',
self_healing=False,
# self_healing=False,
)
assert generator.generate() == 0
@pytest.mark.parametrize('mock_input_sequence', [['y']], indirect=True)
def test_generation_level_5_company_logos(microservice_dir, mock_input_sequence):
os.environ['VERBOSE'] = 'true'
@@ -177,11 +181,14 @@ The square is returned as png.
''',
str(microservice_dir),
'gpt-3.5-turbo',
self_healing=False,
# self_healing=False,
)
assert generator.generate() == 0
@pytest.mark.parametrize('mock_input_sequence', [['y', 'https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/PNG_transparency_demonstration_1.png/560px-PNG_transparency_demonstration_1.png']], indirect=True)
@pytest.mark.parametrize('mock_input_sequence', [['y',
'https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/PNG_transparency_demonstration_1.png/560px-PNG_transparency_demonstration_1.png']],
indirect=True)
def test_generation_level_5(microservice_dir, mock_input_sequence):
"""
Requirements:
@@ -193,7 +200,8 @@ def test_generation_level_5(microservice_dir, mock_input_sequence):
Databases: ❌
"""
os.environ['VERBOSE'] = 'true'
generator = Generator(f'''
generator = Generator(
f'''
The input is an image.
Use the following api to get the description of the image:
Request:
@@ -215,10 +223,10 @@ The description is then used to generate a joke.
The joke is the put on the image.
The output is the image with the joke on it.
''',
str(microservice_dir),
'gpt-3.5-turbo',
self_healing=False,
)
str(microservice_dir),
'gpt-3.5-turbo',
# self_healing=False,
)
assert generator.generate() == 0
# @pytest.fixture