From b337f74d24d38b4b8e12e1af87fa99ccccda4528 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Florian=20Ho=CC=88nicke?=
Date: Thu, 11 May 2023 21:53:21 +0200
Subject: [PATCH 1/3] =?UTF-8?q?=F0=9F=8F=83=20fix:=20run=20function?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 dev_gpt/apis/gpt.py                                            | 2 +-
 .../options/generate/static_files/microservice/jina_wrapper.py | 2 +-
 dev_gpt/options/generate/templates_user.py                     | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/dev_gpt/apis/gpt.py b/dev_gpt/apis/gpt.py
index 949fb4a..44d329f 100644
--- a/dev_gpt/apis/gpt.py
+++ b/dev_gpt/apis/gpt.py
@@ -77,7 +77,7 @@ class GPTSession:
                     }]
                 )
                 break
-            except RateLimitError:
+            except (RateLimitError, openai.error.APIError):
                 sleep(1)
                 continue
         return True
diff --git a/dev_gpt/options/generate/static_files/microservice/jina_wrapper.py b/dev_gpt/options/generate/static_files/microservice/jina_wrapper.py
index bfc4553..5cd7191 100644
--- a/dev_gpt/options/generate/static_files/microservice/jina_wrapper.py
+++ b/dev_gpt/options/generate/static_files/microservice/jina_wrapper.py
@@ -11,5 +11,5 @@ class DevGPTExecutor(Executor):
     @jina_requests()
     def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
         for d in docs:
-            d.text = json.dumps(func(json.loads(d.text)))
+            d.text = func(d.text)
         return docs
diff --git a/dev_gpt/options/generate/templates_user.py b/dev_gpt/options/generate/templates_user.py
index 8eea63f..a7a373e 100644
--- a/dev_gpt/options/generate/templates_user.py
+++ b/dev_gpt/options/generate/templates_user.py
@@ -105,7 +105,7 @@ generated_string = gpt(prompt) # fill-in the prompt (str); the output is a stri
 template_generate_function = PromptTemplate.from_template(
     general_guidelines_string + '''
 
-Write a python function which receives as input a dictionary and outputs a dictionary. The function is called 'func'.
+Write a python function which receives as input json string (that can be parsed with the python function json.loads) and outputs a json string (that can be parsed with the python function json.loads). The function is called 'func'.
 The function must full-fill: '{microservice_description}'.
 It will be tested with the following scenario: '{test_description}'.
 For the implementation use the following package(s): '{packages}'.
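For context, a minimal sketch (not part of the patch) of a `func` that satisfies the JSON-string contract introduced in PATCH 1/3: the Jina wrapper now passes the raw `d.text` string straight through, so the generated function has to do its own `json.loads`/`json.dumps`. The `text` field and the upper-casing below are purely illustrative:

```python
import json


def func(json_input: str) -> str:
    # Parse the incoming JSON string, transform it, and serialize the result back to JSON.
    data = json.loads(json_input)
    result = {'text': data.get('text', '').upper()}  # illustrative transformation only
    return json.dumps(result)
```

With this contract in place, the wrapper body reduces to `d.text = func(d.text)`, as the hunk above shows.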
From a8505031d14f2d159b56a81b763c650e4d935718 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Florian=20Ho=CC=88nicke?=
Date: Fri, 12 May 2023 15:25:38 +0200
Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=8F=83=20fix:=20run=20function?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 dev_gpt/options/generate/pm/pm.py          |  2 +-
 dev_gpt/options/generate/templates_user.py | 18 +++++++++++++-----
 2 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/dev_gpt/options/generate/pm/pm.py b/dev_gpt/options/generate/pm/pm.py
index 105eb7a..7f3dfa5 100644
--- a/dev_gpt/options/generate/pm/pm.py
+++ b/dev_gpt/options/generate/pm/pm.py
@@ -63,7 +63,7 @@ Description of the microservice:
         condition_question='Does the microservice send requests to an API?',
         question_gen='Generate a question that asks for the endpoint and an example of a request and response when interacting with the external API.',
         extension_name='Example of API usage',
-        post_transformation_fn=translation(from_format='api instruction', to_format='python code snippet')
+        post_transformation_fn=translation(from_format='api instruction', to_format='python code snippet raw without formatting')
     )
 
     return microservice_description, test_description
diff --git a/dev_gpt/options/generate/templates_user.py b/dev_gpt/options/generate/templates_user.py
index a7a373e..efc9c38 100644
--- a/dev_gpt/options/generate/templates_user.py
+++ b/dev_gpt/options/generate/templates_user.py
@@ -105,14 +105,18 @@ generated_string = gpt(prompt) # fill-in the prompt (str); the output is a stri
 template_generate_function = PromptTemplate.from_template(
     general_guidelines_string + '''
 
-Write a python function which receives as input json string (that can be parsed with the python function json.loads) and outputs a json string (that can be parsed with the python function json.loads). The function is called 'func'.
-The function must full-fill: '{microservice_description}'.
+Write a python function which receives as \
+input json string (that can be parsed with the python function json.loads) and \
+outputs a json string (that can be parsed with the python function json.loads). \
+The function is called 'func'.
+The function must fulfill the following description: '{microservice_description}'.
 It will be tested with the following scenario: '{test_description}'.
 For the implementation use the following package(s): '{packages}'.
 
-The code must start with the following import:
+The code must start with the following imports:
 ```
 from .apis import GPT_3_5_Turbo
+import json
 ```
 Obey the following rules:
 ''' + not_allowed_function_string + '''
@@ -134,9 +138,10 @@ template_generate_test = PromptTemplate.from_template(
 Write a single pytest case that tests the following scenario: '{test_description}'.
 In case the test scenario is not precise enough, test a general case without any assumptions.
 Start the test with an extensive comment about the test case. If gpt_3_5_turbo is used in the executor, then the test must not check the exact output of the executor as it is not deterministic.
-The test must start with the following import:
+The test must start with the following imports:
 ```
 from .microservice import func
+import json
 ```
 ''' + not_allowed_function_string + '''
 The test must not open local files.
@@ -331,7 +336,10 @@ Example:
 
 **microservice.py**
 ```python
-print('hello world')
+import json
+
+def func(json_input: str) -> str:
+    return json_input['img_base64']
 ```'''
 )
 

From 203ceea068f166e1b49f856701714d9d6ecee9d1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Florian=20Ho=CC=88nicke?=
Date: Fri, 12 May 2023 15:52:24 +0200
Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=8F=83=20fix:=20run=20function?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 dev_gpt/options/generate/templates_user.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/dev_gpt/options/generate/templates_user.py b/dev_gpt/options/generate/templates_user.py
index efc9c38..fff03ee 100644
--- a/dev_gpt/options/generate/templates_user.py
+++ b/dev_gpt/options/generate/templates_user.py
@@ -153,9 +153,9 @@ The test must not set any environment variables which require a key.
 
 
 template_generate_requirements = PromptTemplate.from_template(
-    general_guidelines_string + '''
+    general_guidelines_string + f'''
 
-{code_files_wrapped}
+{{code_files_wrapped}}
 
 Write the content of the requirements.txt file like this:
 **requirements.txt**
 ```
 Add any more packages that are needed to run the code.
 You must not add gpt_3_5_turbo to the requirements.txt file.
-All versions are fixed using ~=, ==, <, >, <=, >=. The package versions must not have conflicts. Output only the requirements.txt file.
-''' + '\n' + template_code_wrapping_string
-)
+All versions are fixed using ~=, ==, <, >, <=, >=. The package versions must not have conflicts.
+
+{template_code_wrapping_string}
+Note: you must only output the requirements.txt file - no other file.
+''')
 
 
 template_generate_apt_get_install = PromptTemplate.from_template(
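To illustrate the test template updated in PATCH 2/3, here is a hedged sketch of the kind of pytest case the prompt now asks for. In a real generated project the test would start with `from .microservice import func` and `import json` as the template requires; the `func` below is only a local stand-in so the snippet runs on its own, and the scenario is purely illustrative:

```python
import json


def func(json_input: str) -> str:
    # Local stand-in for the generated microservice.py (illustrative only).
    return json.dumps({'echo': json.loads(json_input)})


def test_func_returns_valid_json():
    # Test case: pass a JSON string to func and verify that the result parses as
    # JSON again, without asserting exact output (it may be non-deterministic).
    response = func(json.dumps({'text': 'hello'}))
    assert isinstance(json.loads(response), dict)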
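For reference, a minimal sketch (assuming LangChain's `PromptTemplate`, which the surrounding code appears to use) of why the f-string conversion in PATCH 3/3 works: doubled braces survive f-string evaluation as single braces and therefore remain template variables, while single-brace names such as `template_code_wrapping_string` are interpolated immediately. The `footer` string below is a hypothetical stand-in:

```python
from langchain.prompts import PromptTemplate

# Hypothetical stand-in for template_code_wrapping_string.
footer = 'Wrap the file content in a fenced code block.'

# {{code_files_wrapped}} renders as {code_files_wrapped}, so it stays a template
# variable; {footer} is substituted right away while the f-string is evaluated.
template = PromptTemplate.from_template(f'''
{{code_files_wrapped}}

Write the content of the requirements.txt file.

{footer}
''')

print(template.format(code_files_wrapped='**requirements.txt**\nrequests~=2.31.0'))
```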