diff --git a/dev_gpt/apis/gpt.py b/dev_gpt/apis/gpt.py
index 335eab0..7fe21ca 100644
--- a/dev_gpt/apis/gpt.py
+++ b/dev_gpt/apis/gpt.py
@@ -17,6 +17,7 @@ from urllib3.exceptions import InvalidChunkLength
 from dev_gpt.constants import PRICING_GPT4_PROMPT, PRICING_GPT4_GENERATION, PRICING_GPT3_5_TURBO_PROMPT, \
     PRICING_GPT3_5_TURBO_GENERATION, CHARS_PER_TOKEN
 from dev_gpt.options.generate.conversation_logger import ConversationLogger
+from dev_gpt.options.generate.parser import identity_parser
 from dev_gpt.options.generate.templates_system import template_system_message_base
 from dev_gpt.utils.string_tools import print_colored, get_template_parameters
 
@@ -170,7 +171,7 @@ class _GPTConversation:
         return SystemMessage(content=system_message)
 
 
-def ask_gpt(prompt_template, parser, **kwargs):
+def ask_gpt(prompt_template, parser=identity_parser, **kwargs):
     template_parameters = get_template_parameters(prompt_template)
     if set(template_parameters) != set(kwargs.keys()):
         raise ValueError(
diff --git a/dev_gpt/options/generate/generator.py b/dev_gpt/options/generate/generator.py
index ee83f86..3a395b9 100644
--- a/dev_gpt/options/generate/generator.py
+++ b/dev_gpt/options/generate/generator.py
@@ -12,7 +12,7 @@ from langchain.schema import SystemMessage, AIMessage
 from pydantic.dataclasses import dataclass
 
 from dev_gpt.apis import gpt
-from dev_gpt.apis.gpt import _GPTConversation
+from dev_gpt.apis.gpt import _GPTConversation, ask_gpt
 from dev_gpt.apis.jina_cloud import process_error_message, push_executor, is_executor_in_hub
 from dev_gpt.apis.pypi import is_package_on_pypi, clean_requirements_txt
 from dev_gpt.constants import FILE_AND_TAG_PAIRS, NUM_IMPLEMENTATION_STRATEGIES, MAX_DEBUGGING_ITERATIONS, \
@@ -512,14 +512,7 @@ pytest
         return 'yes' in answer.lower()
 
     def generate_microservice_name(self, description):
-        name = self.generate_and_persist_file(
-            section_title='Generate microservice name',
-            template=template_generate_microservice_name,
-            destination_folder=self.microservice_root_path,
-            file_name_s=['name.txt'],
-            description=description
-        )['name.txt']
-        return name
+        return ask_gpt(template_generate_microservice_name, description=description)
 
     def get_possible_packages(self):
         print_colored('', '\n\n############# What packages to use? #############', 'blue')
diff --git a/dev_gpt/options/generate/templates_user.py b/dev_gpt/options/generate/templates_user.py
index 47442e8..bfda955 100644
--- a/dev_gpt/options/generate/templates_user.py
+++ b/dev_gpt/options/generate/templates_user.py
@@ -28,28 +28,27 @@ The implemented function and the test must not have a __main__ function.
 The implemented function and the test must use gpt_3_5_turbo if the task requires the processing of language (e.g., understanding, generation, summarization, transformation, translation, modification or classification). Other language models are not allowed.
 The implemented function must follow a clean and elegant coding style.'''
 
+plain_response_format_prompt = '''The response must be raw text and not fenced by markdown or html'''
 
 template_generate_microservice_name = PromptTemplate.from_template(
-    '''Generate a name for the executor matching the description:
-"{description}"
+    f'''Generate a name for the executor matching the description:
+"{{description}}"
 The executor name must fulfill the following criteria:
 - camel case
 - start with a capital letter
 - only consists of lower and upper case characters
 - end with Executor.
-
-Your response must exactly match the following block code format (double asterisks for the file name and triple backticks for the file block):
-**name.txt**
-```
-
-```
-Example for: "Get a png as input and return a vectorized version as svg.":
-**name.txt**
+{plain_response_format_prompt}
+Positive example:
+PngToSvgExecutor
+Negative example:
 ```
 PngToSvgExecutor
-```'''
+```
+'''
 )
 
 
+# todo split into multiple calls. One for brainstorming - one for the final answer
 template_generate_possible_packages = PromptTemplate.from_template(
     '''Here is the task description of the problem you need to solve:
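For context, the sketch below is a minimal, self-contained illustration of the calling pattern this diff introduces: ask_gpt with a parser argument that now defaults to identity_parser, which lets generate_microservice_name return the model answer directly instead of persisting name.txt. The ask_gpt, identity_parser, and get_template_parameters defined here are simplified stand-ins for the real implementations in dev_gpt.apis.gpt and dev_gpt.options.generate.parser, and the actual GPT call is replaced by a placeholder string, so treat this as an approximation rather than the library code.

```
# Simplified stand-ins for the real dev_gpt helpers (assumed behaviour).
from string import Formatter


def identity_parser(text):
    # Assumed: return the raw model output unchanged.
    return text


def get_template_parameters(template):
    # Collect the {placeholders} referenced by a template string.
    return {name for _, name, _, _ in Formatter().parse(template) if name}


def ask_gpt(prompt_template, parser=identity_parser, **kwargs):
    # Mirrors the refactored signature: simple callers can omit the parser.
    if get_template_parameters(prompt_template) != set(kwargs):
        raise ValueError('template parameters and kwargs do not match')
    prompt = prompt_template.format(**kwargs)
    raw_answer = '<llm answer for: %s>' % prompt  # placeholder for the real GPT call
    return parser(raw_answer)


# The caller no longer passes a parser or writes the result to name.txt:
print(ask_gpt('Generate a name for the executor matching the description: {description}',
              description='Get a png as input and return a vectorized version as svg.'))
```

One detail worth noting in templates_user.py: because template_generate_microservice_name is now built from an f-string, the {description} placeholder is escaped as {{description}} so it survives f-string interpolation and remains a PromptTemplate parameter, while {plain_response_format_prompt} is substituted at import time.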