refactor: cleanup

Florian Hönicke
2023-05-09 18:08:09 +02:00
parent b86ba5df09
commit c29c212e12
8 changed files with 354 additions and 328 deletions

View File

@@ -1,5 +1,5 @@
from dev_gpt.apis.gpt import ask_gpt
-from dev_gpt.options.generate.chains.prompt_factory import context_to_string
+from dev_gpt.options.generate.prompt_factory import context_to_string
from dev_gpt.options.generate.parser import identity_parser

View File

@@ -0,0 +1,38 @@
from dev_gpt.apis.gpt import ask_gpt
from dev_gpt.options.generate.parser import identity_parser
def user_feedback_loop(context, current_description):
while (user_feedback := get_user_feedback(current_description)):
context['user_feedback'] = user_feedback
current_description = ask_gpt(
add_feedback_prompt,
identity_parser,
**context
)
del context['user_feedback']
return current_description
def get_user_feedback(microservice_description):
while True:
user_feedback = input(
f'I suggest that we implement the following microservice:\n{microservice_description}\nDo you agree? [y/n]')
if user_feedback.lower() in ['y', 'yes', 'yeah', 'yep', 'yup', 'sure', 'ok', 'okay']:
print('Great! I will hand this over to the developers!')
return None
elif user_feedback.lower() in ['n', 'no', 'nope', 'nah', 'nay', 'not']:
return input('What do you want to change?')
add_feedback_prompt = '''\
Microservice description:
```
{microservice_description}
```
User feedback:
```
{user_feedback}
```
Update the microservice description by incorporating the user feedback in a concise way without losing any information.'''
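
For context, a minimal sketch of how this new chain is meant to be driven; it mirrors the call site in PM.refine_description later in this commit. The example description string is made up, and the sketch assumes dev-gpt is installed and configured with a GPT backend:

from dev_gpt.options.generate.chains.user_confirmation_feedback_loop import user_feedback_loop

# Hypothetical starting context; in dev-gpt it is assembled by the PM chain.
context = {'microservice_description': 'Accepts a PNG image and returns a grayscale version of it.'}

# Repeatedly asks for feedback on stdin and lets GPT fold it into the description
# until the user answers "y"; the refined description is returned.
refined = user_feedback_loop(context, context['microservice_description'])
print(refined)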

View File

@@ -1,5 +1,5 @@
from dev_gpt.apis.gpt import ask_gpt
-from dev_gpt.options.generate.chains.prompt_factory import context_to_string
+from dev_gpt.options.generate.prompt_factory import context_to_string
from dev_gpt.options.generate.parser import boolean_parser

View File

@@ -481,15 +481,15 @@ dev-gpt deploy --path {self.microservice_root_path}
]
return packages_list
-def create_prototype_implementation(self):
-microservice_py_lines = ['''\
-Class {microservice_name}:''']
-for sub_task in self.pm.iterate_over_sub_tasks_pydantic(self.sub_task_tree):
-microservice_py_lines.append(f' {sub_task.python_fn_signature}')
-microservice_py_lines.append(f' """')
-microservice_py_lines.append(f' {sub_task.python_fn_docstring}')
-microservice_py_lines.append(f' """')
-microservice_py_lines.append(f' raise NotImplementedError')
-microservice_py_str = '\n'.join(microservice_py_lines)
-persist_file(os.path.join(self.microservice_root_path, 'microservice.py'), microservice_py_str)
+# def create_prototype_implementation(self):
+# microservice_py_lines = ['''\
+# Class {microservice_name}:''']
+# for sub_task in self.pm.iterate_over_sub_tasks_pydantic(self.sub_task_tree):
+# microservice_py_lines.append(f' {sub_task.python_fn_signature}')
+# microservice_py_lines.append(f' """')
+# microservice_py_lines.append(f' {sub_task.python_fn_docstring}')
+# microservice_py_lines.append(f' """')
+# microservice_py_lines.append(f' raise NotImplementedError')
+# microservice_py_str = '\n'.join(microservice_py_lines)
+# persist_file(os.path.join(self.microservice_root_path, 'microservice.py'), microservice_py_str)

View File

@@ -1,18 +1,15 @@
-import json
-import re
-from typing import Generator
from dev_gpt.apis import gpt
from dev_gpt.apis.gpt import ask_gpt
-from dev_gpt.options.generate.chains.condition import is_false, is_true
-from dev_gpt.options.generate.chains.get_user_input_if_neede import get_user_input_if_needed
-from dev_gpt.options.generate.parser import identity_parser, boolean_parser, json_parser
-from dev_gpt.options.generate.pm.task_tree_schema import TaskTree
+from dev_gpt.options.generate.chains.user_confirmation_feedback_loop import user_feedback_loop
+from dev_gpt.options.generate.condition import is_false, is_true
+from dev_gpt.options.generate.chains.get_user_input_if_needed import get_user_input_if_needed
+from dev_gpt.options.generate.parser import identity_parser
+# from dev_gpt.options.generate.pm.task_tree_schema import TaskTree
from dev_gpt.options.generate.ui import get_random_employee
class PM:
-def refine_specification(self, microservice_description) -> TaskTree:
+def refine_specification(self, microservice_description):
pm = get_random_employee('pm')
print(f'{pm.emoji}👋 Hi, I\'m {pm.name}, a PM at Jina AI. Gathering the requirements for our engineers.')
original_task = microservice_description
@@ -34,152 +31,29 @@ Description of the microservice:
val = input('you: ')
return val
-def refine(self, microservice_description) -> TaskTree:
+def refine(self, microservice_description):
microservice_description, test_description = self.refine_description(microservice_description)
return microservice_description, test_description
# sub_task_tree = self.construct_sub_task_tree(microservice_description)
# return sub_task_tree
def get_nlp_fns(self, microservice_description):
return ask_gpt(
get_nlp_fns_prompt,
json_parser,
microservice_description=microservice_description
)
def construct_sub_task_tree(self, microservice_description):
"""
takes a microservice description and recursively constructs a tree of sub-tasks that need to be done to implement the microservice
"""
#
# nlp_fns = self.get_nlp_fns(
# microservice_description
# )
sub_task_tree_dict = ask_gpt(
construct_sub_task_tree_prompt, json_parser,
microservice_description=microservice_description,
# nlp_fns=nlp_fns
)
reflections = ask_gpt(
sub_task_tree_reflections_prompt, identity_parser,
microservice_description=microservice_description,
# nlp_fns=nlp_fns,
sub_task_tree=sub_task_tree_dict,
)
solutions = ask_gpt(
sub_task_tree_solutions_prompt, identity_parser,
# nlp_fns=nlp_fns,
microservice_description=microservice_description, sub_task_tree=sub_task_tree_dict,
reflections=reflections,
)
sub_task_tree_updated = ask_gpt(
sub_task_tree_update_prompt,
json_parser,
microservice_description=microservice_description,
# nlp_fns=nlp_fns,
sub_task_tree=sub_task_tree_dict, solutions=solutions
)
# for task_dict in self.iterate_over_sub_tasks(sub_task_tree_updated):
# task_dict.update(self.get_additional_task_info(task_dict['task']))
sub_task_tree = TaskTree.parse_obj(sub_task_tree_updated)
return sub_task_tree
def get_additional_task_info(self, sub_task_description):
additional_info_dict = self.get_additional_infos(
description=sub_task_description,
parameter={
'display_name': 'Task description',
'text': sub_task_description,
},
potentially_required_information_list=[
{
'field_name': 'api_key',
'display_name': 'valid API key',
}, {
'field_name': 'database_access',
'display_name': 'database access',
}, {
'field_name': 'documentation',
'display_name': 'documentation',
}, {
'field_name': 'example_api_call',
'display_name': 'curl command or sample code for api call',
},
],
)
return additional_info_dict
def get_additional_infos(self, description, parameter, potentially_required_information_list):
additional_info_dict = {}
for potentially_required_information in potentially_required_information_list:
is_task_requiring_information = ask_gpt(
is_task_requiring_information_template,
boolean_parser,
description=description,
description_title=parameter['display_name'],
description_text=parameter['text'],
potentially_required_information=potentially_required_information
)
if is_task_requiring_information:
generated_question = ask_gpt(
generate_question_for_required_information_template,
identity_parser,
description=description,
description_title=parameter['display_name'],
description_text=parameter['text'],
potentially_required_information=potentially_required_information
)
user_answer = input(generated_question)
additional_info_dict[potentially_required_information] = user_answer
return additional_info_dict
def iterate_over_sub_tasks(self, sub_task_tree_updated):
sub_tasks = sub_task_tree_updated['sub_tasks'] if 'sub_tasks' in sub_task_tree_updated else []
for sub_task in sub_tasks:
yield sub_task
yield from self.iterate_over_sub_tasks(sub_task)
def iterate_over_sub_tasks_pydantic(self, sub_task_tree: TaskTree) -> Generator[TaskTree, None, None]:
sub_tasks = sub_task_tree.sub_fns
for sub_task in sub_tasks:
yield sub_task
yield from self.iterate_over_sub_tasks_pydantic(sub_task)
def refine_description(self, microservice_description):
-microservice_description = ask_gpt(better_description_prompt, identity_parser,
-microservice_description=microservice_description)
-request_schema = ask_gpt(generate_request_schema_prompt, identity_parser,
-microservice_description=microservice_description)
-response_schema = ask_gpt(generate_output_schema_prompt, identity_parser,
-microservice_description=microservice_description, request_schema=request_schema)
-# additional_specifications = self.add_additional_specifications(microservice_description, request_schema,
-# response_schema)
-microservice_description = ask_gpt(summarize_description_and_schemas_prompt, identity_parser,
-microservice_description=microservice_description,
-request_schema=request_schema,
-response_schema=response_schema,
-# additional_specifications=additional_specifications
-)
-while (user_feedback := self.get_user_feedback(microservice_description)):
-microservice_description = ask_gpt(add_feedback_prompt, identity_parser,
-microservice_description=microservice_description,
-user_feedback=user_feedback)
+context = {
+'microservice_description': microservice_description
+}
+self.auto_refine_description(context)
+user_feedback_loop(context, microservice_description)
test_description = ask_gpt(
generate_test_description_prompt,
identity_parser,
-microservice_description=microservice_description,
-request_schema=request_schema,
-response_schema=response_schema
+**context
)
example_file_url = get_user_input_if_needed(
context={
'Microservice description': microservice_description,
-'Request schema': request_schema,
-'Response schema': response_schema,
+'Request schema': context['request_schema'],
+'Response schema': context['response_schema'],
},
conditions=[
is_true('Does request schema contain an example file url?'),
@@ -192,30 +66,151 @@ Description of the microservice:
return microservice_description, test_description
-def add_additional_specifications(self, microservice_description, request_schema, response_schema):
-questions = ask_gpt(
-ask_questions_prompt, identity_parser,
-microservice_description=microservice_description,
-request_schema=request_schema, response_schema=response_schema)
-additional_specifications = ask_gpt(
-answer_questions_prompt,
-identity_parser,
-microservice_description=microservice_description,
-request_schema=request_schema,
-response_schema=response_schema,
-questions=questions
-)
-return additional_specifications
-def get_user_feedback(self, microservice_description):
-while True:
-user_feedback = input(
-f'I suggest that we implement the following microservice:\n{microservice_description}\nDo you agree? [y/n]')
-if user_feedback.lower() in ['y', 'yes', 'yeah', 'yep', 'yup', 'sure', 'ok', 'okay']:
-print('Great! I will hand this over to the developers!')
-return None
-elif user_feedback.lower() in ['n', 'no', 'nope', 'nah', 'nay', 'not']:
-return input('What do you want to change?')
+def auto_refine_description(self, context):
+context['microservice_description'] = ask_gpt(
+better_description_prompt,
+identity_parser,
+**context
+)
+context['request_schema'] = ask_gpt(
+generate_request_schema_prompt,
+identity_parser,
+**context
+)
+context['response_schema'] = ask_gpt(
+generate_output_schema_prompt,
+identity_parser,
+**context
+)
+context['microservice_description'] = ask_gpt(
+summarize_description_and_schemas_prompt, identity_parser,
+**context
+)
+# def get_nlp_fns(self, microservice_description):
+# return ask_gpt(
+# get_nlp_fns_prompt,
+# json_parser,
+# microservice_description=microservice_description
+# )
+#
+# def construct_sub_task_tree(self, microservice_description):
+# """
# takes a microservice description and recursively constructs a tree of sub-tasks that need to be done to implement the microservice
# """
# #
# # nlp_fns = self.get_nlp_fns(
# # microservice_description
# # )
#
# sub_task_tree_dict = ask_gpt(
# construct_sub_task_tree_prompt, json_parser,
# microservice_description=microservice_description,
# # nlp_fns=nlp_fns
# )
# reflections = ask_gpt(
# sub_task_tree_reflections_prompt, identity_parser,
# microservice_description=microservice_description,
# # nlp_fns=nlp_fns,
# sub_task_tree=sub_task_tree_dict,
# )
# solutions = ask_gpt(
# sub_task_tree_solutions_prompt, identity_parser,
# # nlp_fns=nlp_fns,
# microservice_description=microservice_description, sub_task_tree=sub_task_tree_dict,
# reflections=reflections,
# )
# sub_task_tree_updated = ask_gpt(
# sub_task_tree_update_prompt,
# json_parser,
# microservice_description=microservice_description,
# # nlp_fns=nlp_fns,
# sub_task_tree=sub_task_tree_dict, solutions=solutions
# )
# # for task_dict in self.iterate_over_sub_tasks(sub_task_tree_updated):
# # task_dict.update(self.get_additional_task_info(task_dict['task']))
#
# sub_task_tree = TaskTree.parse_obj(sub_task_tree_updated)
# return sub_task_tree
# def get_additional_task_info(self, sub_task_description):
# additional_info_dict = self.get_additional_infos(
# description=sub_task_description,
# parameter={
# 'display_name': 'Task description',
# 'text': sub_task_description,
# },
# potentially_required_information_list=[
# {
# 'field_name': 'api_key',
# 'display_name': 'valid API key',
# }, {
# 'field_name': 'database_access',
# 'display_name': 'database access',
# }, {
# 'field_name': 'documentation',
# 'display_name': 'documentation',
# }, {
# 'field_name': 'example_api_call',
# 'display_name': 'curl command or sample code for api call',
# },
# ],
#
# )
# return additional_info_dict
# def get_additional_infos(self, description, parameter, potentially_required_information_list):
# additional_info_dict = {}
# for potentially_required_information in potentially_required_information_list:
# is_task_requiring_information = ask_gpt(
# is_task_requiring_information_template,
# boolean_parser,
# description=description,
# description_title=parameter['display_name'],
# description_text=parameter['text'],
# potentially_required_information=potentially_required_information
# )
# if is_task_requiring_information:
# generated_question = ask_gpt(
# generate_question_for_required_information_template,
# identity_parser,
# description=description,
# description_title=parameter['display_name'],
# description_text=parameter['text'],
# potentially_required_information=potentially_required_information
# )
# user_answer = input(generated_question)
# additional_info_dict[potentially_required_information] = user_answer
# return additional_info_dict
# def iterate_over_sub_tasks(self, sub_task_tree_updated):
# sub_tasks = sub_task_tree_updated['sub_tasks'] if 'sub_tasks' in sub_task_tree_updated else []
# for sub_task in sub_tasks:
# yield sub_task
# yield from self.iterate_over_sub_tasks(sub_task)
#
# def iterate_over_sub_tasks_pydantic(self, sub_task_tree: TaskTree) -> Generator[TaskTree, None, None]:
# sub_tasks = sub_task_tree.sub_fns
# for sub_task in sub_tasks:
# yield sub_task
# yield from self.iterate_over_sub_tasks_pydantic(sub_task)
# def add_additional_specifications(self, microservice_description, request_schema, response_schema):
# questions = ask_gpt(
# ask_questions_prompt, identity_parser,
# microservice_description=microservice_description,
# request_schema=request_schema, response_schema=response_schema)
# additional_specifications = ask_gpt(
# answer_questions_prompt,
# identity_parser,
# microservice_description=microservice_description,
# request_schema=request_schema,
# response_schema=response_schema,
# questions=questions
# )
# return additional_specifications
# return self.refine_user_feedback(microservice_description)
# def refine_user_feedback(self, microservice_description):
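
As an aside, the refactor in this hunk threads one context dict through every ask_gpt call, so each prompt template pulls its placeholders from the same keys. A rough standalone illustration of that pattern, with ask_gpt stubbed out (the real one lives in dev_gpt.apis.gpt) and a shortened stand-in prompt; the {{ }} escaping in the templates above suggests they are plain str.format strings, which this sketch assumes:

# Stub that mimics the calling convention used above: template + parser + context kwargs.
# The real ask_gpt sends the filled prompt to GPT; here we only fill the template.
def ask_gpt(prompt_template, parser, **context):
    return parser(prompt_template.format(**context))

def identity_parser(text):
    return text

# Shortened stand-in for better_description_prompt.
better_description_prompt = 'Rewrite this microservice description more precisely:\n{microservice_description}'

context = {'microservice_description': 'grayscale a PNG image'}
context['microservice_description'] = ask_gpt(
    better_description_prompt,
    identity_parser,
    **context
)
print(context['microservice_description'])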
@@ -247,7 +242,7 @@ Example for the description: "return the average temperature of the 5 days weath
generate_request_schema_prompt = client_description + '''
Generate the lean request json schema of the Microservice.
-Note: If you are not sure about the details, the come up with the minimal number of parameters possible.'''
+Note: If you are not sure about the details, then come up with the minimal number of parameters possible.'''
generate_output_schema_prompt = client_description + '''
request json schema:
@@ -255,7 +250,7 @@ request json schema:
{request_schema}
```
Generate the lean response json schema for the Microservice.
-Note: If you are not sure about the details, the come up with the minimal number of parameters possible.'''
+Note: If you are not sure about the details, then come up with the minimal number of parameters possible.'''
# If we want to activate this back, then it first needs to work. Currently, it outputs "no" for too many cases.
# is_feedback_valuable_prompt = client_description + '''
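
For illustration only (not taken from the repository): the kind of lean request/response schema pair these two prompts aim for, written out for a hypothetical image-grayscaling microservice:

# Hypothetical example of the lean schemas the prompts above ask GPT to produce.
request_schema = {
    'type': 'object',
    'properties': {'image': {'type': 'string', 'description': 'base64-encoded PNG'}},
    'required': ['image'],
}
response_schema = {
    'type': 'object',
    'properties': {'image': {'type': 'string', 'description': 'base64-encoded grayscale PNG'}},
    'required': ['image'],
}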
@@ -282,132 +277,127 @@ Note: You must not mention any details about algorithms or the technical impleme
Note: You must not mention that there is a request and response JSON schema
Note: You must not use any formatting like triple backticks.'''
add_feedback_prompt = client_description + '''
User feedback:
```
{user_feedback}
```
Update the microservice description by incorporating the user feedback in a concise way without losing any information.'''
summarize_description_prompt = client_description + '''
Make the description more concise without losing any information.
Note: You must not mention any details about algorithms or the technical implementation.
Note: You must ignore facts that are not specified.
Note: You must ignore facts that are not relevant.
Note: You must ignore facts that are unknown.
Note: You must ignore facts that are unclear.'''
+# summarize_description_prompt = client_description + '''
+# Make the description more concise without losing any information.
+# Note: You must not mention any details about algorithms or the technical implementation.
+# Note: You must ignore facts that are not specified.
+# Note: You must ignore facts that are not relevant.
+# Note: You must ignore facts that are unknown.
+# Note: You must ignore facts that are unclear.'''
-construct_sub_task_tree_prompt = client_description + '''
-Recursively constructs a tree of functions that need to be implemented for the endpoint_function that retrieves a json string and returns a json string.
-Example:
-Input: "Input: list of integers, Output: Audio file of short story where each number is mentioned exactly once."
-Output:
-{{
-"description": "Create an audio file containing a short story in which each integer from the provided list is seamlessly incorporated, ensuring that every integer is mentioned exactly once.",
"python_fn_signature": "def generate_integer_story_audio(numbers: List[int]) -> str:",
"sub_fns": [
{{
"description": "Generate sentence from integer.",
"python_fn_signature": "def generate_sentence_from_integer(number: int) -> int:",
"sub_fns": []
}},
{{
"description": "Convert the story into an audio file.",
"python_fn_signature": "def convert_story_to_audio(story: str) -> bytes:",
"sub_fns": []
}}
]
}}
-Note: you must only output the json string - nothing else.
-Note: you must pretty print the json string.'''
+# construct_sub_task_tree_prompt = client_description + '''
+# Recursively constructs a tree of functions that need to be implemented for the endpoint_function that retrieves a json string and returns a json string.
# Example:
# Input: "Input: list of integers, Output: Audio file of short story where each number is mentioned exactly once."
# Output:
# {{
# "description": "Create an audio file containing a short story in which each integer from the provided list is seamlessly incorporated, ensuring that every integer is mentioned exactly once.",
# "python_fn_signature": "def generate_integer_story_audio(numbers: List[int]) -> str:",
# "sub_fns": [
# {{
# "description": "Generate sentence from integer.",
# "python_fn_signature": "def generate_sentence_from_integer(number: int) -> int:",
# "sub_fns": []
# }},
# {{
# "description": "Convert the story into an audio file.",
# "python_fn_signature": "def convert_story_to_audio(story: str) -> bytes:",
# "sub_fns": []
# }}
# ]
# }}
#
# Note: you must only output the json string - nothing else.
# Note: you must pretty print the json string.'''
-sub_task_tree_reflections_prompt = client_description + '''
-Sub task tree:
-```
-{sub_task_tree}
-```
-Write down 3 arguments why the sub task tree might not perfectly represents the information mentioned in the microservice description. (5 words per argument)'''
+# sub_task_tree_reflections_prompt = client_description + '''
+# Sub task tree:
+# ```
+# {sub_task_tree}
+# ```
+# Write down 3 arguments why the sub task tree might not perfectly represents the information mentioned in the microservice description. (5 words per argument)'''
#
# sub_task_tree_solutions_prompt = client_description + '''
# Sub task tree:
# ```
# {sub_task_tree}
# ```
# Reflections:
# ```
# {reflections}
# ```
# For each constructive criticism, write a solution (5 words) that address the criticism.'''
#
# sub_task_tree_update_prompt = client_description + '''
# Sub task tree:
# ```
# {sub_task_tree}
# ```
# Solutions:
# ```
# {solutions}
# ```
# Update the sub task tree by applying the solutions. (pretty print the json string)'''
#
# ask_questions_prompt = client_description + '''
# Request json schema:
# ```
# {request_schema}
# ```
# Response json schema:
# ```
# {response_schema}
# ```
# Ask the user up to 5 unique detailed questions (5 words) about the microservice description that are not yet answered.
# '''
-sub_task_tree_solutions_prompt = client_description + '''
-Sub task tree:
-```
-{sub_task_tree}
-```
-Reflections:
-```
-{reflections}
-```
-For each constructive criticism, write a solution (5 words) that address the criticism.'''
+# answer_questions_prompt = client_description + '''
+# Request json schema:
+# ```
+# {request_schema}
+# ```
+# Response json schema:
+# ```
+# {response_schema}
+# ```
+# Questions:
# ```
# {questions}
# ```
# Answer all questions where you can think of a plausible answer.
# Note: You must not answer questions with something like "...is not specified", "I don't know" or "Unknown".
# '''
+# is_task_requiring_information_template = '''\
+# {description_title}
+# ```
+# {description_text}
+# ```
+# Does the implementation of the {description_title} require information about "{potentially_required_information}"?
+# Note: You must either answer "yes" or "no".'''
-sub_task_tree_update_prompt = client_description + '''
-Sub task tree:
-```
-{sub_task_tree}
-```
-Solutions:
-```
{solutions}
```
Update the sub task tree by applying the solutions. (pretty print the json string)'''
+# generate_question_for_required_information_template = '''\
+# {description_title}
+# ```
+# {description_text}
+# ```
+# Generate a question that asks for the information "{potentially_required_information}" regarding "{description_title}".
+# Note: you must only output the question - nothing else.'''
-ask_questions_prompt = client_description + '''
-Request json schema:
-```
-{request_schema}
-```
-Response json schema:
-```
{response_schema}
```
Ask the user up to 5 unique detailed questions (5 words) about the microservice description that are not yet answered.
'''
+# get_nlp_fns_prompt = client_description + '''
+# Respond with all code parts that could be accomplished by GPT 3.
+# Example for "Take a video and/or a pdf as input, extract the subtitles from the video and the text from the pdf, \
+# summarize the extracted text and translate it to German":
+# ```
+# [
+# "summarize the text",
+# "translate the text to German"
+# ]
+# ```
+# Note: only list code parts that could be expressed as a function that takes a string as input and returns a string as output.
+# Note: the output must be parsable by the python function json.loads.'''
-answer_questions_prompt = client_description + '''
-Request json schema:
-```
-{request_schema}
-```
-Response json schema:
-```
-{response_schema}
-```
-Questions:
-```
-{questions}
```
Answer all questions where you can think of a plausible answer.
Note: You must not answer questions with something like "...is not specified", "I don't know" or "Unknown".
'''
is_task_requiring_information_template = '''\
{description_title}
```
{description_text}
```
Does the implementation of the {description_title} require information about "{potentially_required_information}"?
Note: You must either answer "yes" or "no".'''
generate_question_for_required_information_template = '''\
{description_title}
```
{description_text}
```
Generate a question that asks for the information "{potentially_required_information}" regarding "{description_title}".
Note: you must only output the question - nothing else.'''
get_nlp_fns_prompt = client_description + '''
Respond with all code parts that could be accomplished by GPT 3.
Example for "Take a video and/or a pdf as input, extract the subtitles from the video and the text from the pdf, \
summarize the extracted text and translate it to German":
```
[
"summarize the text",
"translate the text to German"
]
```
Note: only list code parts that could be expressed as a function that takes a string as input and returns a string as output.
Note: the output must be parsable by the python function json.loads.'''
generate_test_description_prompt = client_description + '''
Request json schema:

View File

@@ -1,22 +1,22 @@
-from typing import Dict, List, Union, Optional
-from pydantic import BaseModel, Field
-class JSONSchema(BaseModel):
-type: str
-format: Union[str, None] = None
-items: Union['JSONSchema', None] = None
-properties: Dict[str, 'JSONSchema'] = Field(default_factory=dict)
-additionalProperties: Union[bool, 'JSONSchema'] = True
-required: List[str] = Field(default_factory=list)
-class Config:
-arbitrary_types_allowed = True
-class TaskTree(BaseModel):
-description: Optional[str]
-python_fn_signature: str
-sub_fns: List['TaskTree']
-JSONSchema.update_forward_refs()
-TaskTree.update_forward_refs()
+# from typing import Dict, List, Union, Optional
+# from pydantic import BaseModel, Field
+#
+# class JSONSchema(BaseModel):
+# type: str
+# format: Union[str, None] = None
+# items: Union['JSONSchema', None] = None
+# properties: Dict[str, 'JSONSchema'] = Field(default_factory=dict)
+# additionalProperties: Union[bool, 'JSONSchema'] = True
+# required: List[str] = Field(default_factory=list)
+#
+# class Config:
+# arbitrary_types_allowed = True
+#
+# class TaskTree(BaseModel):
+# description: Optional[str]
+# python_fn_signature: str
+# sub_fns: List['TaskTree']
+#
+# JSONSchema.update_forward_refs()
+# TaskTree.update_forward_refs()
+#
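
Since the schema module above is only commented out rather than deleted, here is a small reference sketch of how the TaskTree model consumed the sub-task-tree JSON that construct_sub_task_tree_prompt asked for (pydantic v1 API; the top-level description is abbreviated, the sub-task is taken from the prompt's example):

from typing import List, Optional
from pydantic import BaseModel

class TaskTree(BaseModel):
    description: Optional[str]
    python_fn_signature: str
    sub_fns: List['TaskTree']

TaskTree.update_forward_refs()

# Shape matches the example JSON in the (now commented-out) construct_sub_task_tree_prompt.
tree = TaskTree.parse_obj({
    'description': 'Create an audio file of a short story that mentions each integer exactly once.',
    'python_fn_signature': 'def generate_integer_story_audio(numbers: List[int]) -> str:',
    'sub_fns': [
        {
            'description': 'Generate sentence from integer.',
            'python_fn_signature': 'def generate_sentence_from_integer(number: int) -> int:',
            'sub_fns': [],
        },
    ],
})
print(tree.sub_fns[0].python_fn_signature)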

View File

@@ -1,11 +1,9 @@
-import os
-from dev_gpt.apis import gpt
-from dev_gpt.options.generate.pm.pm import PM
-def test_construct_sub_task_tree():
-os.environ['VERBOSE'] = 'true'
-gpt_session = gpt.GPTSession('test', model='gpt-3.5-turbo')
-pm = PM(gpt_session)
-microservice_description = 'This microservice receives an image as input and generates a joke based on what is depicted on the image. The input must be a binary string of the image. The output is an image with the generated joke overlaid on it.'
-sub_task_tree = pm.construct_sub_task_tree(microservice_description)
+# import os
+#
+# from dev_gpt.options.generate.pm.pm import PM
+#
+# def test_construct_sub_task_tree():
+# os.environ['VERBOSE'] = 'true'
+# pm = PM()
+# microservice_description = 'This microservice receives an image as input and generates a joke based on what is depicted on the image. The input must be a binary string of the image. The output is an image with the generated joke overlaid on it.'
+# sub_task_tree = pm.construct_sub_task_tree(microservice_description)