mirror of https://github.com/aljazceru/dev-gpt.git
synced 2025-12-20 15:14:20 +01:00
feat: search fix web search
@@ -40,9 +40,10 @@ class TaskSpecification:
 
 
 class Generator:
-    def __init__(self, task_description, path, model='gpt-4'):
+    def __init__(self, task_description, path, model='gpt-4', self_healing=True):
         self.gpt_session = gpt.GPTSession(model=model)
         self.microservice_specification = TaskSpecification(task=task_description, test=None)
+        self.self_healing = self_healing
         self.microservice_root_path = path
         self.microservice_name = None
         self.previous_microservice_path = None
@@ -325,7 +326,7 @@ pytest
         if not is_executor_in_hub(gateway_name):
             raise Exception(f'{self.microservice_name} not in hub. Hubble logs: {hubble_log}')
 
-    def debug_microservice(self, num_approach, packages):
+    def debug_microservice(self, num_approach, packages, self_healing):
         for i in range(1, MAX_DEBUGGING_ITERATIONS):
             print('Debugging iteration', i)
             print('Trying to debug the microservice. Might take a while...')
@@ -333,6 +334,8 @@ pytest
             log_hubble = push_executor(self.cur_microservice_path)
             error = process_error_message(log_hubble)
             if error:
+                if not self_healing:
+                    raise Exception('Self-healing is disabled. Please fix the error manually.', error)
                 print('An error occurred during the build process. Feeding the error back to the assistant...')
                 self.previous_microservice_path = self.cur_microservice_path
                 self.cur_microservice_path = get_microservice_path(
@@ -520,7 +523,7 @@ pytest
         for num_approach, packages in enumerate(packages_list):
             try:
                 self.generate_microservice(packages, num_approach)
-                self.debug_microservice(num_approach, packages)
+                self.debug_microservice(num_approach, packages, self.self_healing)
                 self.generate_playground()
             except self.MaxDebugTimeReachedException:
                 print('Could not debug the Microservice with the approach:', packages)
@@ -140,7 +140,8 @@ It will be tested with the following scenario: '{{test_description}}'.
 For the implementation use the following package(s): '{{packages}}'.
 
 The code must start with the following imports:
-```{linebreak +'from .apis import GPT_3_5_Turbo' if is_using_gpt_3_5_turbo else ""}
+```{linebreak +'from .apis import GPT_3_5_Turbo' if is_using_gpt_3_5_turbo else ""}{linebreak +'from .apis import search_web, search_images' if is_using_google_custom_search else ""}{linebreak}
 import json
 import requests
 ```
@@ -22,7 +22,8 @@ def test_generation_level_0(microservice_dir, mock_input_sequence):
     generator = Generator(
         "The microservice is very simple, it does not take anything as input and only outputs the word 'test'",
         microservice_dir,
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -46,7 +47,8 @@ Example tweet:
 \'When your coworker microwaves fish in the break room... AGAIN. 🐟🤢
 But hey, at least SOMEONE's enjoying their lunch. #officelife\'''',
         str(microservice_dir),
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -66,7 +68,8 @@ def test_generation_level_2(microservice_dir, mock_input_sequence):
     generator = Generator(
         "The input is a PDF and the output the summarized text (50 words).",
         str(microservice_dir),
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -85,7 +88,8 @@ def test_generation_level_2_svg(microservice_dir, mock_input_sequence):
     generator = Generator(
         "Get a png as input and return a vectorized version as svg.",
         str(microservice_dir),
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -111,7 +115,8 @@ def test_generation_level_3(microservice_dir, mock_input_sequence):
 Example input: 'AAPL'
 ''',
         str(microservice_dir),
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -155,7 +160,8 @@ def test_generation_level_4(microservice_dir, mock_input_sequence):
 4. Return the the audio file as base64 encoded binary.
 ''',
         str(microservice_dir),
-        'gpt-4'
+        'gpt-4',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -170,7 +176,8 @@ All logos need to be arranged on a square.
 The square is returned as png.
 ''',
         str(microservice_dir),
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
@@ -209,7 +216,8 @@ The joke is the put on the image.
 The output is the image with the joke on it.
 ''',
         str(microservice_dir),
-        'gpt-3.5-turbo'
+        'gpt-3.5-turbo',
+        self_healing=False,
     )
     assert generator.generate() == 0
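For reference, a minimal sketch of how the new `self_healing` flag fits the constructor signature introduced above. The import path and the target directory are assumptions for illustration; only the `Generator(...)` arguments and the `generate()` call mirror the diff.

```python
# Sketch only: the import path and output directory are assumed, not part of this commit.
from dev_gpt.generator import Generator  # hypothetical module path

generator = Generator(
    "The microservice takes no input and only outputs the word 'test'",
    '/tmp/test_microservice',  # hypothetical output directory
    'gpt-3.5-turbo',
    self_healing=False,  # raise immediately on build errors instead of feeding them back to the assistant
)
assert generator.generate() == 0
```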