🛺 fix: auto refinement parsing

Florian Hönicke
2023-05-24 17:50:31 +02:00
parent 9086c4292d
commit 3e62a999b6
5 changed files with 27 additions and 22 deletions

View File

@@ -1,7 +1,7 @@
 import json
 from dev_gpt.apis.gpt import ask_gpt
-from dev_gpt.options.generate.parser import identity_parser
+from dev_gpt.options.generate.parser import identity_parser, optional_tripple_back_tick_parser
 from dev_gpt.options.generate.prompt_factory import context_to_string
 from dev_gpt.options.generate.tools.tools import get_available_tools
@@ -14,12 +14,12 @@ def auto_refine_description(context):
     )
     context['request_schema'] = ask_gpt(
         generate_request_schema_prompt,
-        identity_parser,
+        optional_tripple_back_tick_parser,
         context_string=context_to_string(context)
     )
     context['response_schema'] = ask_gpt(
         generate_output_schema_prompt,
-        identity_parser,
+        optional_tripple_back_tick_parser,
         context_string=context_to_string(context)
     )
     context['microservice_description'] = ask_gpt(
@@ -36,12 +36,13 @@ def auto_refine_description(context):
 better_description_prompt = f'''{{context_string}}
 Update the description of the Microservice to make it more precise without adding or removing information.
 Note: the output must be a list of tasks the Microservice has to perform.
-Note: you can uses two tools if necessary:
+Note: you can uses the following tools if necessary:
 {get_available_tools()}
-Example for the description: "return an image representing the current weather for a given location."
+Example for the description: "return an image representing the current weather for a given location." \
+when the tools gpt-3.5-turbo and google-search are available:
 1. get the current weather information from the https://openweathermap.org/ API
-2. generate a Google search query to find the image matching the weather information and the location by using gpt-3.5-turbo
-3. find the image by using the Google search API
+2. generate a Google search query to find the image matching the weather information and the location by using gpt-3.5-turbo (a)
+3. find the image by using the Google search API (b)
 4. return the image as a base64 encoded string'''

 generate_request_schema_prompt = '''{context_string}
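
The schema calls above now run the raw GPT reply through optional_tripple_back_tick_parser instead of identity_parser, so a reply that wraps the JSON schema in a triple-backtick fence is unwrapped before it lands in the context. A minimal sketch of the effect on context['request_schema'] (the raw reply below is made up for illustration):

from dev_gpt.options.generate.parser import optional_tripple_back_tick_parser

# hypothetical raw GPT reply for the request-schema prompt
raw_reply = '```\n{"location": "string"}\n```'

# identity_parser would have stored the fenced text verbatim;
# the new parser stores only the unwrapped schema
assert optional_tripple_back_tick_parser(raw_reply) == '{"location": "string"}'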

View File

@@ -15,20 +15,18 @@ def is_question_false(question):
 def answer_yes_no_question(text, question):
     pros_and_cons = ask_gpt(
-        pros_and_cons_prompt.format(
+        pros_and_cons_prompt,
         question=question,
         text=text,
-        ),
-        identity_parser,
     )
     return ask_gpt(
-        question_prompt.format(
+        question_prompt,
+        boolean_parser,
         text=text,
         question=question,
         pros_and_cons=pros_and_cons,
-        ),
-        boolean_parser)
+    )


 pros_and_cons_prompt = '''\
 # Context
@@ -42,5 +40,7 @@ question_prompt = '''\
 {text}
 # Question
 {question}
+# Pros and Cons
+{pros_and_cons}
 Note: You must answer "yes" or "no".
 '''
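
The refactor above suggests that ask_gpt now takes the unformatted prompt template together with its format arguments (and an optional parser) and performs the .format() call itself. The real signature lives in dev_gpt.apis.gpt and is not part of this diff, so the following stand-alone sketch only illustrates that assumed convention with a fake LLM call:

def fake_llm(prompt: str) -> str:
    # stand-in for the actual GPT call in dev_gpt.apis.gpt
    return 'yes'

def ask_gpt_sketch(prompt_template, parser=lambda x: x, **format_kwargs):
    # assumption: formatting now happens inside ask_gpt and the parser defaults to identity
    prompt = prompt_template.format(**format_kwargs)
    return parser(fake_llm(prompt))

def boolean_parser(x):
    return 'yes' in x.lower()

print(ask_gpt_sketch(
    'Answer the question about the text.\n# Text\n{text}\n# Question\n{question}\n',
    boolean_parser,
    text='The sky is blue.',
    question='Is the text about colors?',
))  # True, because the fake reply contains "yes"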

View File

@@ -5,6 +5,12 @@ import re
 def identity_parser(x):
     return x
+
+def optional_tripple_back_tick_parser(x):
+    if '```' in x:
+        pattern = r'```(.+)```'
+        x = re.findall(pattern, x, re.DOTALL)[-1]
+    return x.strip()
 def boolean_parser(x):
     return 'yes' in x.lower()
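
For reference, a quick usage sketch of the new parser with made-up inputs: when the reply contains triple backticks it keeps only the text between the first and the last fence, otherwise it just strips surrounding whitespace.

from dev_gpt.options.generate.parser import optional_tripple_back_tick_parser

fenced = 'Sure, here is the schema:\n```\n{"city": "string"}\n```'
plain = '  {"city": "string"}  '

print(optional_tripple_back_tick_parser(fenced))  # {"city": "string"}
print(optional_tripple_back_tick_parser(plain))   # {"city": "string"}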

View File

@@ -8,7 +8,6 @@ def context_to_string(context):
     for k, v in context.items():
         if isinstance(v, dict):
             v = json.dumps(v, indent=4)
-        v = make_prompt_friendly(v)
         context_strings.append(f'''\
 {k}:
 ```
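
Based only on the lines visible in this hunk, context_to_string renders each context entry as its key followed by a fenced value (dicts are JSON-encoded first), and with make_prompt_friendly removed the value is now embedded as-is. A rough, self-contained approximation of that behaviour (the joining and the rest of the function are not shown in this diff):

import json

def context_to_string_sketch(context: dict) -> str:
    # approximation of the visible loop: dicts become pretty-printed JSON,
    # every entry is rendered as "<key>:" followed by a fenced value
    context_strings = []
    for k, v in context.items():
        if isinstance(v, dict):
            v = json.dumps(v, indent=4)
        context_strings.append(f'{k}:\n```\n{v}\n```')
    return '\n\n'.join(context_strings)

print(context_to_string_sketch({
    'microservice_description': 'return a weather image for a given location',
    'request_schema': {'location': 'string'},
}))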

View File

@@ -174,11 +174,10 @@ def test_generation_level_5_company_logos(microservice_dir, mock_input_sequence)
     os.environ['VERBOSE'] = 'true'
     generator = Generator(
         f'''\
-Given a list of email addresses, get all company names from them.
+Given a list of email addresses, get all unique company names from them.
 For all companies, get the company logo.
 All logos need to be arranged on a square.
-The square is returned as png.
-''',
+The square is returned as png.''',
         str(microservice_dir),
         'gpt-3.5-turbo',
         # self_healing=False,