👩‍💼 feat: pm role test

Florian Hönicke
2023-04-28 18:57:57 +02:00
parent ab08785ae0
commit cca9c5caec
4 changed files with 14 additions and 32 deletions

requirements-test.txt Normal file

@@ -0,0 +1,2 @@
+pytest
+pytest-split
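pytest-split is presumably added so the suite can be sharded across CI workers. A minimal sketch of how such a split is typically driven from Python; the shard numbers and the use of pytest.main are illustrative assumptions, not taken from this repository's CI:

```
# Illustrative only: shard the suite with pytest-split's --splits/--group options.
# The numbers are arbitrary; duration-based splitting normally needs a
# .test_durations file created beforehand with --store-durations.
import sys

import pytest

if __name__ == "__main__":
    # Run shard 1 of 2 of the collected tests.
    sys.exit(pytest.main(["--splits", "2", "--group", "1"]))
```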


@@ -11,6 +11,7 @@ from langchain.schema import SystemMessage, HumanMessage, AIMessage
 from pydantic.dataclasses import dataclass
 from src.apis import gpt
+from src.apis.gpt import _GPTConversation
 from src.apis.jina_cloud import process_error_message, push_executor, is_executor_in_hub
 from src.constants import FILE_AND_TAG_PAIRS, NUM_IMPLEMENTATION_STRATEGIES, MAX_DEBUGGING_ITERATIONS, \
     PROBLEMATIC_PACKAGES, EXECUTOR_FILE_NAME, TEST_EXECUTOR_FILE_NAME, TEST_EXECUTOR_FILE_TAG, \
@@ -110,7 +111,7 @@ metas:
         parse_result_fn = self.get_default_parse_result_fn(file_name_s)
         print_colored('', f'\n\n############# {section_title} #############', 'blue')
-        system_introduction_message = self._create_system_message(self.microservice_specification.task, self.microservice_specification.test, system_definition_examples)
+        system_introduction_message = _GPTConversation._create_system_message(self.microservice_specification.task, self.microservice_specification.test, system_definition_examples)
         conversation = self.gpt_session.get_conversation(messages=[system_introduction_message])
         template_kwargs = {k: v for k, v in template_kwargs.items() if k in template.input_variables}
         if 'file_name' in template.input_variables and len(file_name_s) == 1:
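The second hunk replaces the instance call self._create_system_message(...) with a call on the _GPTConversation class, which implies the helper is exposed at class level (for example as a staticmethod). A rough, hypothetical sketch of that call shape; only the argument list mirrors the hunk, the class body is assumed and not taken from src/apis/gpt.py:

```
# Hypothetical sketch: the real _GPTConversation internals are not shown in this diff.
from langchain.schema import SystemMessage


class _GPTConversation:
    @staticmethod
    def _create_system_message(task, test, system_definition_examples=None):
        # Assumed body: compose the system prompt from the task and test description.
        examples = '\n'.join(system_definition_examples or [])
        return SystemMessage(content=f'Task:\n{task}\n\nTest:\n{test}\n\n{examples}')


# The caller no longer needs a conversation instance to build the introduction message:
system_introduction_message = _GPTConversation._create_system_message(
    'return the word test', "assert result == 'test'", [])
```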


@@ -22,8 +22,10 @@ The implemented function and the test must not use a pre-trained model unless it
 The implemented function and the test must not train a model.
 The implemented function and the test must not contain prototype or placeholder implementations.
 The implemented function and the test must run in a docker container based on debian.
-The implemented function and the test must use gpt_3_5_turbo_api if the task requires understanding or generating natural language or using any language model. Other language models are not allowed.'''
+The implemented function and the test must not use libraries like Flask.
+The implemented function and the test must not have a __main__ function.
+The implemented function and the test must use gpt_3_5_turbo_api if the task requires understanding or generation of natural language or using any language model. Other language models are not allowed.
+The implemented function and the test must not use gpt_3_5_turbo_api or any other language model if the task does not require understanding or generation of natural language.'''
 template_generate_microservice_name = PromptTemplate.from_template(
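For readers unfamiliar with how these strings are consumed: PromptTemplate.from_template infers its input_variables from the {placeholders}, which is what the template_kwargs filtering in the generator hunk above relies on. A small, self-contained illustration with a made-up template, not one of the repository's:

```
# Made-up template, only meant to illustrate input_variables and the kwargs
# filtering pattern visible in the generator hunk above.
from langchain import PromptTemplate

template = PromptTemplate.from_template(
    'Write {file_name} for a microservice that must: {task}'
)
print(template.input_variables)  # ['file_name', 'task'], inferred from the placeholders

template_kwargs = {'file_name': 'microservice.py', 'task': "output the word 'test'", 'unused': 42}
template_kwargs = {k: v for k, v in template_kwargs.items() if k in template.input_variables}
print(template.format(**template_kwargs))
```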
@@ -87,7 +89,7 @@ template_code_wrapping_string = '''The code will go into {file_name_purpose}. Ma
 You must provide the complete file with the exact same syntax to wrap the code.'''
-gpt_35_turbo_usage_string = """If you use gpt_3_5_turbo_api, then this is an example on how to use it:
+gpt_35_turbo_usage_string = """If need to use gpt_3_5_turbo_api, then this is an example on how to use it:
 ```
 from .apis import GPT_3_5_Turbo_API
@@ -151,8 +153,7 @@ template_generate_requirements = PromptTemplate.from_template(
 {code_files_wrapped}
 Write the content of the requirements.txt file.
-The requirements.txt file must include the following packages:
-**requirements.txt**
+The requirements.txt file must include the following packages in that specified version:
 ```
 jina==3.15.1.dev14
 docarray==0.21.0


@@ -1,29 +1,7 @@
-import unittest.mock as mock
 import os
 from src.options.generate.generator import Generator
-from src.apis.gpt import GPTSession
 
 def test_generator(tmpdir):
-    # Define a mock response
-    mock_response = {
-        "choices": [
-            {
-                "delta": {
-                    "content": "This is a mock response."
-                }
-            }
-        ]
-    }
-    # Define a function to replace openai.ChatCompletion.create
-    def mock_create(*args, **kwargs):
-        return [mock_response] * kwargs.get("stream", 1)
-    # Define a function to replace get_openai_api_key
-    def mock_get_openai_api_key(*args, **kwargs):
-        pass
-    # Use mock.patch as a context manager to replace the original methods with the mocks
-    with mock.patch("openai.ChatCompletion.create", side_effect=mock_create), \
-            mock.patch.object(GPTSession, "configure_openai_api_key", side_effect=mock_get_openai_api_key):
-        generator = Generator("my description", "my test")
-        generator.generate(str(tmpdir))
+    os.environ['VERBOSE'] = 'true'
+    generator = Generator("The microservice is very simple, it does not take anything as input and only outputs the word 'test'", "my test", str(tmpdir) + 'microservice', 'gpt-3.5-turbo')
+    generator.generate()
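Read together with the unchanged lines, the simplified test presumably ends up roughly as below; the statements come from the hunk, only the blank-line layout and the wrapping of the long constructor call are guessed:

```
# Approximate post-commit content of the test module, reconstructed from the hunk above.
import os

from src.options.generate.generator import Generator


def test_generator(tmpdir):
    os.environ['VERBOSE'] = 'true'
    generator = Generator(
        "The microservice is very simple, it does not take anything as input and only outputs the word 'test'",
        "my test",
        str(tmpdir) + 'microservice',
        'gpt-3.5-turbo',
    )
    generator.generate()
```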