diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c62554b..76e6d66 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,12 +5,12 @@ on: pull_request: jobs: - test: + test_level: runs-on: ubuntu-latest strategy: fail-fast: false matrix: - group: [1, 2, 3, 4, 5] + group: [0, 1, 2] steps: - uses: actions/checkout@v2 - name: Set up Python 3.8 @@ -27,7 +27,7 @@ jobs: - name: Test id: test run: | - pytest -v -s -m "not gpu" --splits 5 --group ${{ matrix.group }} --splitting-algorithm least_duration test/ + pytest -vs test/test_generator.py::test_generation_level_${{ matrix.group }} timeout-minutes: 10 env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} diff --git a/.test_durations b/.test_durations deleted file mode 100644 index 9ee8723..0000000 --- a/.test_durations +++ /dev/null @@ -1,9 +0,0 @@ -{ - "test/test_generator.py::test_generation_level_0": 100, - "test/test_generator.py::test_generation_level_1": 100, - "test/test_generator.py::test_generation_level_2": 100, - "test/test_generator.py::test_generation_level_3": 100, - "test/test_generator.py::test_generation_level_4": 100, - "test/test_hub.py::test_is_microservice_in_hub": 1, - "test/test_strings.py::test_clean_color_codes": 1 -} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 65abdd2..8dbf199 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,8 @@ jina==3.15.1.dev14 click streamlit==1.9.0 -openai>=0.26.0 +openai>=0.27.5 psutil jcloud jina-hubble-sdk -langchain \ No newline at end of file +langchain==0.0.153 \ No newline at end of file diff --git a/src/apis/gpt.py b/src/apis/gpt.py index c386f6a..16a53af 100644 --- a/src/apis/gpt.py +++ b/src/apis/gpt.py @@ -108,24 +108,26 @@ class _GPTConversation: self.messages = messages self.print_stream = print_stream self.print_costs = print_costs - for message in messages: + + + def print_messages(self, messages): + for i, message in enumerate(messages): if os.environ['VERBOSE'].lower() == 'true': if isinstance(message, SystemMessage): - print_colored('system - prompt', message.content, 'magenta') + print_colored(f'({i}) system - prompt', message.content, 'magenta') elif isinstance(message, HumanMessage): - print_colored('user - prompt', message.content, 'blue') + print_colored(f'({i}) user - prompt', message.content, 'blue') elif isinstance(message, AIMessage): - print_colored('assistant - prompt', message.content, 'green') + print_colored(f'({i}) assistant - prompt', message.content, 'green') def chat(self, prompt: str, role: str = 'user'): MassageClass = HumanMessage if role == 'user' else SystemMessage chat_message = MassageClass(content=prompt) self.messages.append(chat_message) - if os.environ['VERBOSE'].lower() == 'true': - color = 'blue' if role == 'user' else 'magenta' - print_colored(role, prompt, color) + self.print_messages(self.messages) if self.print_stream: print_colored('assistant', '', 'green', end='') + print('thinking...') for i in range(10): try: response = self._chat(self.messages) diff --git a/src/options/generate/generator.py b/src/options/generate/generator.py index f4170bb..ad6b45b 100644 --- a/src/options/generate/generator.py +++ b/src/options/generate/generator.py @@ -1,3 +1,4 @@ +import json import os import random import re @@ -24,7 +25,8 @@ from src.options.generate.templates_user import template_generate_microservice_n template_solve_pip_dependency_issue, template_is_dependency_issue, template_generate_playground, \ template_generate_function, template_generate_test, 
template_generate_requirements, \ template_chain_of_thought, template_summarize_error, \ - template_generate_apt_get_install, template_solve_apt_get_dependency_issue, template_refinement + template_generate_apt_get_install, template_solve_apt_get_dependency_issue, template_pm_task_iteration, \ + template_pm_test_iteration from src.options.generate.ui import get_random_employee from src.utils.io import persist_file, get_all_microservice_files_with_content, get_microservice_path @@ -44,7 +46,7 @@ class Generator: def extract_content_from_result(self, plain_text, file_name, match_single_block=False, can_contain_code_block=True): optional_line_break = '\n' if can_contain_code_block else '' # the \n at the end makes sure that ``` within the generated code is not matched because it is not right before a line break - pattern = fr"^\*\*{file_name}\*\*\n```(?:\w+\n)?([\s\S]*?){optional_line_break}```" + pattern = fr"\*?\*?{file_name}\*?\*?\n```(?:\w+\n)?([\s\S]*?){optional_line_break}```" match = re.search(pattern, plain_text, re.MULTILINE) if match: return match.group(1).strip() @@ -181,20 +183,31 @@ metas: }), file_name_purpose=REQUIREMENTS_FILE_NAME, file_name_s=[REQUIREMENTS_FILE_NAME], + parse_result_fn=self.parse_result_fn_requirements, tag_name=REQUIREMENTS_FILE_TAG, )[REQUIREMENTS_FILE_NAME] - self.generate_and_persist_file( - section_title='Generate Dockerfile', - template=template_generate_apt_get_install, - destination_folder=MICROSERVICE_FOLDER_v1, - file_name_s=None, - parse_result_fn=self.parse_result_fn_dockerfile, - docker_file_wrapped=self.read_docker_template(), - requirements_file_wrapped=self.files_to_string({ - REQUIREMENTS_FILE_NAME: requirements_content, - }) - ) + # I deactivated this because 3.5-turbo was hallucinating packages that were not needed + # now, in the first iteration the default dockerfile is used + # self.generate_and_persist_file( + # section_title='Generate Dockerfile', + # template=template_generate_apt_get_install, + # destination_folder=MICROSERVICE_FOLDER_v1, + # file_name_s=None, + # parse_result_fn=self.parse_result_fn_dockerfile, + # docker_file_wrapped=self.read_docker_template(), + # requirements_file_wrapped=self.files_to_string({ + # REQUIREMENTS_FILE_NAME: requirements_content, + # }) + # ) + + + with open(os.path.join(os.path.dirname(__file__), 'static_files', 'microservice', 'Dockerfile'), 'r', encoding='utf-8') as f: + docker_file_template_lines = f.readlines() + docker_file_template_lines = [line for line in docker_file_template_lines if not line.startswith('RUN apt-get update')] + docker_file_content = ''.join(docker_file_template_lines) + persist_file(docker_file_content, os.path.join(MICROSERVICE_FOLDER_v1, 'Dockerfile')) + self.write_config_yml(microservice_name, MICROSERVICE_FOLDER_v1) @@ -206,8 +219,23 @@ metas: return f.read() def parse_result_fn_dockerfile(self, content_raw: str): + json_string = self.extract_content_from_result(content_raw, 'apt-get-packages.json', match_single_block=True) + packages = ' '.join(json.loads(json_string)['packages']) + docker_file_template = self.read_docker_template() - return {DOCKER_FILE_NAME: docker_file_template.replace('{{apt_get_packages}}', '{apt_get_packages}').format(apt_get_packages=content_raw)} + return {DOCKER_FILE_NAME: docker_file_template.replace('{{apt_get_packages}}', '{apt_get_packages}').format(apt_get_packages=packages)} + + def parse_result_fn_requirements(self, content_raw: str): + content_parsed = self.extract_content_from_result(content_raw, 'requirements.txt', 
match_single_block=True) + + lines = content_parsed.split('\n') + lines = [line for line in lines if not any([pkg in line for pkg in ['jina', 'docarray', 'openai', 'pytest', 'gpt_3_5_turbo_api']])] + content_modified = f'''jina==3.15.1.dev14 +docarray==0.21.0 +openai==0.27.5 +pytest +{os.linesep.join(lines)}''' + return {REQUIREMENTS_FILE_NAME: content_modified} def generate_playground(self, microservice_name, microservice_path): print_colored('', '\n\n############# Playground #############', 'blue') @@ -301,7 +329,7 @@ metas: section_title='Debugging apt-get dependency issue', template=template_solve_apt_get_dependency_issue, destination_folder=next_microservice_path, - file_name_s=None, + file_name_s=['apt-get-packages.json'], parse_result_fn=self.parse_result_fn_dockerfile, system_definition_examples=[], summarized_error=summarized_error, @@ -363,15 +391,17 @@ metas: def get_possible_packages(self): print_colored('', '\n\n############# What packages to use? #############', 'blue') - packages_csv_string = self.generate_and_persist_file( + packages_json_string = self.generate_and_persist_file( section_title='Generate possible packages', template=template_generate_possible_packages, destination_folder=self.microservice_root_path, - file_name_s=['packages.csv'], + file_name_s=['strategies.json'], system_definition_examples=[], description=self.microservice_specification.task - )['packages.csv'] - packages_list = [[pkg.strip().lower() for pkg in packages_string.split(',')] for packages_string in packages_csv_string.split('\n')] + )['strategies.json'] + packages_list = [[pkg.strip().lower() for pkg in packages] for packages in json.loads(packages_json_string)] + packages_list = [[self.replace_with_gpt_3_5_turbo_if_possible(pkg) for pkg in packages] for packages in packages_list] + packages_list = [ packages for packages in packages_list if len(set(packages).intersection(set(PROBLEMATIC_PACKAGES))) == 0 ] @@ -398,6 +428,7 @@ metas: print_colored('', f'Could not debug the Microservice with any of the approaches: {packages} giving up.', 'red') + return -1 continue print(f''' You can now run or deploy your microservice: @@ -405,7 +436,7 @@ gptdeploy run --path {self.microservice_root_path} gptdeploy deploy --path {self.microservice_root_path} ''' ) - break + return 0 def summarize_error(self, error): conversation = self.gpt_session.get_conversation() @@ -422,8 +453,37 @@ gptdeploy deploy --path {self.microservice_root_path} if not original_task: self.microservice_specification.task = self.get_user_input(pm, 'What should your microservice do?') - self.refine_requirements(pm, system_task_iteration, 'task') - self.refine_requirements(pm, system_test_iteration, 'test') + self.refine_requirements( + pm, + [ + SystemMessage(content=system_task_introduction + system_task_iteration), + ], + 'task', + '', + template_pm_task_iteration, + micro_service_initial_description=f'''Microservice description: +{self.microservice_specification.task} +''', + ) + self.refine_requirements( + pm, + [ + SystemMessage(content=system_task_introduction + system_test_iteration), + ], + 'test', + '''Note that the test scenario must not contain information that was already mentioned in the microservice description. 
+Note that you must not ask for information that were already mentioned before.''', + template_pm_test_iteration, + micro_service_initial_description=f'''Microservice original description: +``` +{original_task} +``` +Microservice refined description: +``` +{self.microservice_specification.task} +``` +''', + ) break except self.TaskRefinementException as e: @@ -437,19 +497,15 @@ Test scenario: {self.microservice_specification.test} ''') - def refine_requirements(self, pm, template_init, refinement_type): + def refine_requirements(self, pm, messages, refinement_type, custom_suffix, template_pm_iteration, micro_service_initial_description=None): user_input = self.microservice_specification.task - messages = [ - SystemMessage(content=system_task_introduction + template_init), - ] num_parsing_tries = 0 while True: conversation = self.gpt_session.get_conversation(messages, print_stream=os.environ['VERBOSE'].lower() == 'true', print_costs=False) - print('thinking...') agent_response_raw = conversation.chat( - template_refinement.format( - user_input=user_input, - _optional_test=' test' if refinement_type == 'test' else '' + template_pm_iteration.format( + custom_suffix=custom_suffix, + micro_service_initial_description=micro_service_initial_description if len(messages) == 1 else '', ), role='user' ) @@ -457,6 +513,7 @@ Test scenario: agent_question = self.extract_content_from_result(agent_response_raw, 'prompt.txt', can_contain_code_block=False) final = self.extract_content_from_result(agent_response_raw, 'final.txt', can_contain_code_block=False) if final: + messages.append(AIMessage(content=final)) setattr(self.microservice_specification, refinement_type, final) break elif agent_question: @@ -477,3 +534,12 @@ Test scenario: while not val: val = input('you: ') return val + + @staticmethod + def replace_with_gpt_3_5_turbo_if_possible(pkg): + if pkg in ['allennlp', 'bertopic', 'fasttext', 'flair', 'gensim', 'nltk', + 'pattern', 'polyglot', 'pytorch-transformers', 'rasa', 'sentence-transformers', + 'spacy', 'stanza', 'textblob', 'textstat', 'transformers']: + + return 'gpt_3_5_turbo_api' + return pkg diff --git a/src/options/generate/static_files/microservice/Dockerfile b/src/options/generate/static_files/microservice/Dockerfile index 190e8ff..d0de26d 100644 --- a/src/options/generate/static_files/microservice/Dockerfile +++ b/src/options/generate/static_files/microservice/Dockerfile @@ -1,4 +1,6 @@ FROM jinaai/jina:3.14.1-py39-standard +# update pip +RUN pip install --upgrade pip RUN apt-get update && apt-get install --no-install-recommends -y {{apt_get_packages}} && apt-get clean && rm -rf /var/lib/apt/lists/* diff --git a/src/options/generate/templates_system.py b/src/options/generate/templates_system.py index 41d1cba..7dabc54 100644 --- a/src/options/generate/templates_system.py +++ b/src/options/generate/templates_system.py @@ -28,11 +28,11 @@ You must not output anything else than what you got told in the following steps. 1. You must create a check list for the requirements of the microservice. Input and output have to be accurately specified. -You must use the following format (insert ✅, ❌ or n/a) depending on whether the requirement is fulfilled, not fulfilled or not applicable: -input: -output: -api access: -database access: +You must use the following format (insert defined, not defined or n/a) depending on whether the requirement is fulfilled, not fulfilled or not applicable: +input: +output: +api access: +database access: 2. You must do either a or b. 
@@ -59,9 +59,9 @@ The character sequence ``` must always be at the beginning of the line. You must not add information that was not provided by the client. Example for the description "given a city, get the weather report for the next 5 days": -input: ✅ -output: ✅ -api access: ❌ +input: defined +output: defined +api access: not defined database access: n/a **prompt.txt** @@ -70,8 +70,8 @@ Please provide the url of the weather api and a valid api key or some other way ``` Example for the description "convert png to svg": -input: ✅ -output: ✅ +input: defined +output: defined api access: n/a database access: n/a @@ -81,8 +81,8 @@ The user inserts a png and gets an svg as response. ``` Example for the description "parser": -input: ❌ -output: ❌ +input: not defined +output: not defined api access: n/a database access: n/a @@ -96,71 +96,70 @@ system_test_iteration = f''' The client gives you a description of the microservice (web service). Your task is to describe verbally a unit test for that microservice. There are two cases: -a) If unit test requires an example input file as input: -In this case you must ask the client to provide the example input file as URL. +a) If no example input is provided in the description, then you must ask the client to provide an example input file URL or example string depending on the use-case. You must not accept files that are not URLs. +You must not ask for an example input in case the input can be determined from the conversation with the client. Your response must exactly match the following block code format (double asterisks for the file name and triple backticks for the file block): +1. +contains example: no +2. **prompt.txt** ```text ``` If you did a, you must not do b. -b) Any strings, ints, or bools can be used as input for the unit test. +b) If the input can be determined from the previous messages: In this case you must describe the unit test verbally. Your response must exactly match the following block code format (double asterisks for the file name and triple backticks for the file block): +1. +contains example: yes () +2. **final.txt** ```text - +input: "" +assertion: the output is of type ``` If you did b, you must not do a. -Example 1: -Client: -**client-response.txt** -``` -given a city, get the weather report for the next 5 days using OpenWeatherMap with the api key b6907d289e10d714a6e88b30761fae22 -``` -PM: +Example for: "given a city like "Berlin", get the weather report for the next 5 days using OpenWeatherMap with the api key b6907d289e10d714a6e88b30761fae22": +1. +contains example: yes (Berlin) +2. **final.txt** ```text -The test takes the city "Berlin" as input and asserts that the weather report for the next 5 days exists in the response. +input: "Berlin" +assertion: the output is of type string ``` -Example 2: -Client: -**client-response.txt** -``` -The user inserts a png and gets an svg as response. -``` -PM: +Example for "The user inserts a png and gets an svg as response.": +1. +contains example: no +2. **prompt.txt** ```text Please provide a png example input file as url. ``` -Client: -**client-response.txt** -``` -https://aquasecurity.github.io/kube-bench/v0.6.5/images/kube-bench-logo-only.png -``` -PM: + +Example for "The user inserts a png like https://aquasecurity.github.io/kube-bench/v0.6.5/images/kube-bench-logo-only.png and gets an svg as response.": +1. +contains example: yes (https://aquasecurity.github.io/kube-bench/v0.6.5/images/kube-bench-logo-only.png) +2. 
**final.txt** ```text -The test takes the png https://aquasecurity.github.io/kube-bench/v0.6.5/images/kube-bench-logo-only.png as input and asserts the output is an svg. +input: "https://aquasecurity.github.io/kube-bench/v0.6.5/images/kube-bench-logo-only.png" +assertion: the output is of type svg ``` -Example 3: -Client: -**client-response.txt** -``` -The microservice takes nothing as input and returns the current time. -``` -PM: +Example for "The microservice takes nothing as input and returns the current time.": +1. +contains example: n/a **final.txt** ```text -The test takes nothing as input and asserts that the output is a string. +input: "nothing" +assertion: the output is of type string ``` ''' diff --git a/src/options/generate/templates_user.py b/src/options/generate/templates_user.py index 9e078c3..a505435 100644 --- a/src/options/generate/templates_user.py +++ b/src/options/generate/templates_user.py @@ -24,8 +24,7 @@ The implemented function and the test must not contain prototype or placeholder The implemented function and the test must run in a docker container based on debian. The implemented function and the test must not use libraries like Flask. The implemented function and the test must not have a __main__ function. -The implemented function and the test must use gpt_3_5_turbo_api if the task requires understanding or generation of natural language or using any language model. Other language models are not allowed. -The implemented function and the test must not use gpt_3_5_turbo_api or any other language model if the task does not require understanding or generation of natural language.''' +The implemented function and the test must use gpt_3_5_turbo_api if the task requires understanding, generation, summarization, transformation, translation or classification of natural language or using any language model. Other language models are not allowed.''' template_generate_microservice_name = PromptTemplate.from_template( @@ -45,24 +44,30 @@ PDFParserExecutor ) -template_generate_possible_packages_output_format_string = '''You must output the package combinations as a list of lists wrapped into ``` and name it **packages.csv**. Do not use quotation marks around packages names in the output. Separate packages in a combination by comma. The output looks this: -**{file_name}** +template_generate_possible_packages_output_format_string = '''You must output the package combinations as a \ +list of lists wrapped into ``` and name it **strategies.json**. \ +Do not use quotation marks around packages names in the output. \ +Separate packages in a combination by comma. \ +Note that you can also leave a line empty to indicate that one of the strategies does not require any package and can be done in plain python. +The output looks like this: +**strategies.json** ``` -package1a, package1b ... -package2a, package2b, package2c -package3a ... -package4a ... -package5a ... -... +[ + ["package1", "package2", "package3"], + ["package4", "package5"], + ["package6", "package7", "package8", "package9"], + [], + ["package10"] +] ```''' template_generate_possible_packages = PromptTemplate.from_template( '''Here is the task description of the problem you need to solve: "{description}" -1. Write down the different strategies to solve the task. For each strategy write down all the non-trivial subtasks you need to solve. If there is a natural language understanding or generation stragegy, write it down. +1. Write down ut to 3 different strategies to solve the task. 
For each strategy write down all the non-trivial subtasks you need to solve. If there is a natural language understanding or generation strategy, write it down. 2. Find out what is the core problem to solve. -3. List up to 15 Python packages that are specifically designed or have functionalities to solve the complete core problem with one of the defined strategies. You must add gpt_3_5_turbo_api if the task involves generating or understanding natural language or using a (pre-trained) language model. +3. List up to 10 Python packages that are specifically designed or have functionalities to solve the complete core problem with one of the defined strategies. You must add gpt_3_5_turbo_api if the task involves generating or understanding natural language or using a (pre-trained) language model. 4. Exclude any package that can generate or understand natural language or enables using any language model, but you must not exclude gpt_3_5_turbo_api. Print the cleaned list of packages and give a brief reason for keeping it after its name. 5. For each cleaned package think if it fulfills the following requirements: a) specifically designed or have functionalities to solve the complete core problem. @@ -152,13 +157,10 @@ template_generate_requirements = PromptTemplate.from_template( {code_files_wrapped} -Write the content of the requirements.txt file. -The requirements.txt file must include the following packages in that specified version: +Write the content of the requirements.txt file like this: +**requirements.txt** ``` -jina==3.15.1.dev14 -docarray==0.21.0 -openai>=0.26.0 -pytest +... ``` Add any more packages that are needed to run the code. You must not add gpt_3_5_turbo_api to the requirements.txt file. @@ -177,7 +179,27 @@ Name all packages which need to be installed via `apt-get install` in above Dock {requirements_file_wrapped} -Output them as a white space separated list:''' +Note that you must not list apt-get packages that are already installed in the Dockerfile. +Note that openai does not require any apt-get packages. +Note that you are only allowed to list packages where you are highly confident that they are really needed. +Note that you can assume that the standard python packages are already installed. +Output the packages that need to be placed at {{apt_get_packages}} as json in the following format: +**apt-get-packages.json** +```json +{{"packages": ["", ""]}} +``` +Example for the following requirements.txt file: +**requirements.txt** +``` +numpy==1.19.5 +fitz +``` +The output would be: +**apt-get-packages.json** +```json +{{"packages": []}} +``` +''' ) @@ -349,14 +371,9 @@ The playground (app.py) must not import the executor. ''' ) -# Create a wrapper around google called Joogle. It modifies the page summary preview text of the search results to insert the word Jina as much as possible. -template_refinement = PromptTemplate.from_template( - ''' +template_pm_task_iteration = PromptTemplate.from_template( + '''{micro_service_initial_description} 1.Quickly go through the checklist (input/output well defined? api or db access needed?) and think about if you should ask something to the client or if you should write the final description. -**client-response.txt** -```text -{user_input} -``` 2.Either write the prompt.txt or the final.txt file. 
Either ask for clarification like this: **prompt.txt** @@ -364,14 +381,52 @@ Either ask for clarification like this: ``` -Or write the summarized microservice{_optional_test} description like this: +Or write the summarized microservice description like this: **final.txt** ```text - -``` + +``` Note that your response must be either prompt.txt or final.txt. You must not write both. Note that you must obey the double asterisk and tripple backtick syntax from above. +Note that the last sequence of characters in your response must be ``` (triple backtick). Note that prompt.txt must not only contain one question. Note that if urls, secrets, database names, etc. are mentioned, they must be part of the summary. +{custom_suffix} +''' +) + +template_pm_test_iteration = PromptTemplate.from_template( + '''{micro_service_initial_description} +1. write down if the original description and the refined description contain an example input for the microservice. +2. write down either prompt.txt or final.txt. +If the example input for the microservice is mentioned in the refined description or the original description, then output final.txt. +Otherwise, output prompt.txt where you ask for the example input file as URL or the example string. +Except for urls, you should come up with your own example input that makes sense for the microservice description. + +Example for the case where an example input file is required and was not mentioned before: +**prompt.txt** +```text +Can you please provide an example input file as URL? +``` + +Example for the case where the example input string is required and was not mentioned before: +**prompt.txt** +```text +Can you please provide an example input string? +``` +Note that you must not ask for an example input in case the example input is already mentioned in the refined description or the original description. + +Example for the case where the example is already mentioned in the refined description or the original description: +**final.txt** +```text +input: +assertion: the output is of type +``` +Note that your response must be either prompt.txt or final.txt. You must not write both. +Note that you must obey the double asterisk and tripple backtick syntax from above. +Note that the last sequence of characters in your response must be ``` (triple backtick). +Note that your response must start with the character sequence ** (double asterisk). +Note that prompt.txt must only contain one question. +{custom_suffix} ''' ) diff --git a/test/test_generator.py b/test/test_generator.py index 098dc18..cac1950 100644 --- a/test/test_generator.py +++ b/test/test_generator.py @@ -23,9 +23,14 @@ def test_generation_level_0(tmpdir): str(tmpdir) + 'microservice', 'gpt-3.5-turbo' ) - generator.generate() + assert generator.generate() == 0 + + +# fixture +@pytest.fixture +def tmpdir(): + return 'microservice' -@pytest.mark.skip(reason="not possible") def test_generation_level_1(tmpdir): """ Requirements: @@ -38,22 +43,16 @@ def test_generation_level_1(tmpdir): """ os.environ['VERBOSE'] = 'true' generator = Generator( - ''' -Input is a tweet that might contain passive aggressive language like: -'When your coworker microwaves fish in the break room... AGAIN. 🐟🤢 But hey, at least SOMEONE's enjoying their lunch. #officelife' -The output is a tweet that is not passive aggressive like: -'Hi coworker, -I hope you're having an amazing day! -Just a quick note: sometimes microwaving fish can create an interesting aroma in the break room. 
-If you're up for trying different lunch options, that could be a fun way to mix things up. -Enjoy your day! #variety' -''', + '''Input is a tweet that might contain passive aggressive language. The output is the positive version of that tweet. +Example tweet: +\'When your coworker microwaves fish in the break room... AGAIN. 🐟🤢 +But hey, at least SOMEONE's enjoying their lunch. #officelife\'''', str(tmpdir) + 'microservice', 'gpt-3.5-turbo' ) - generator.generate() + assert generator.generate() == 0 + -@pytest.mark.skip(reason="not possible") def test_generation_level_2(tmpdir): """ Requirements: @@ -66,11 +65,12 @@ def test_generation_level_2(tmpdir): """ os.environ['VERBOSE'] = 'true' generator = Generator( - "The input is a PDF like https://www.africau.edu/images/default/sample.pdf and the output the summarized text.", + "The input is a PDF like https://www.africau.edu/images/default/sample.pdf and the output the summarized text (50 words).", str(tmpdir) + 'microservice', 'gpt-3.5-turbo' ) - generator.generate() + assert generator.generate() == 0 + @pytest.mark.skip(reason="not possible") def test_generation_level_3(tmpdir): @@ -103,7 +103,7 @@ Create an audio file of the summarized text. str(tmpdir) + 'microservice', 'gpt-3.5-turbo' ) - generator.generate() + assert generator.generate() == 0 @pytest.mark.skip(reason="not possible") def test_generation_level_4(tmpdir): @@ -141,4 +141,8 @@ The output is the image with the joke on it.''', str(tmpdir) + 'microservice', 'gpt-3.5-turbo' ) - generator.generate() + assert generator.generate() == 0 + + +# further ideas: +# Create a wrapper around google called Joogle. It modifies the page summary preview text of the search results to insert the word Jina as much as possible. \ No newline at end of file
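For reference, a minimal standalone sketch (not part of the diff) of the relaxed file-block extraction that src/options/generate/generator.py switches to: the leading and trailing `**` around the file name become optional, so blocks emitted with or without the bold markers are matched. The helper name `extract_block` and the sample strings below are illustrative assumptions, not code from the repository:

```python
import re

def extract_block(plain_text: str, file_name: str, can_contain_code_block: bool = True):
    # Same relaxed pattern as extract_content_from_result in the diff:
    # the '**' markers around the file name are now optional.
    optional_line_break = '\n' if can_contain_code_block else ''
    pattern = fr"\*?\*?{file_name}\*?\*?\n```(?:\w+\n)?([\s\S]*?){optional_line_break}```"
    match = re.search(pattern, plain_text, re.MULTILINE)
    return match.group(1).strip() if match else None

# Both variants are accepted; the previous anchored '^\*\*...\*\*' pattern only matched the bold form.
with_bold = "**requirements.txt**\n```\npillow\nrequests\n```"
without_bold = "requirements.txt\n```\npillow\nrequests\n```"
assert extract_block(with_bold, "requirements.txt") == "pillow\nrequests"
assert extract_block(without_bold, "requirements.txt") == "pillow\nrequests"
```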