Merge branch 'main' of github.com:jina-ai/microchain into feat_pm_role
Conflicts:
    src/apis/gpt.py
    src/options/generate/generator.py
@@ -26,7 +26,7 @@ Your imagination is the limit!
 <a href="https://github.com/tiangolo/gptdeploy/actions?query=workflow%3ATest+event%3Apush+branch%3Amaster" target="_blank">
     <img src="https://img.shields.io/badge/platform-mac%20%7C%20linux%20%7C%20windows-blue" alt="Supported platforms">
 </a>
-<a href="https://pypi.org/project/gptdeploy" target="_blank">
+<a href="https://pypistats.org/packages/gptdeploy" target="_blank">
     <img src="https://img.shields.io/pypi/dm/gptdeploy?color=%2334D058&label=pypi%20downloads" alt="Downloads">
 </a>
 <a href="https://discord.gg/ESn8ED6Fyn" target="_blank">
@@ -142,3 +142,21 @@ class _GPTConversation:
         self.messages.append(response)
         return response.content
 
+    @staticmethod
+    def _create_system_message(task_description, test_description, system_definition_examples: List[str] = []) -> SystemMessage:
+        if system_definition_examples is None:
+            return None
+
+        system_message = PromptTemplate.from_template(template_system_message_base).format(
+            task_description=task_description,
+            test_description=test_description,
+        )
+        if 'gpt' in system_definition_examples:
+            system_message += f'\n{gpt_example}'
+        if 'executor' in system_definition_examples:
+            system_message += f'\n{executor_example}'
+        if 'docarray' in system_definition_examples:
+            system_message += f'\n{docarray_example}'
+        if 'client' in system_definition_examples:
+            system_message += f'\n{client_example}'
+        return SystemMessage(content=system_message)
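For readers skimming the hunk: the new `_create_system_message` helper only formats the base system prompt and optionally appends example snippets. A standalone sketch of the same pattern, with plain strings standing in for the repo's `template_system_message_base` and example constants (all names below are illustrative, not the actual templates):

```python
# Minimal sketch of the example-appending pattern above; the real code uses
# langchain's PromptTemplate and the repo's template/example strings.
template_system_message_base = 'Task: {task_description}\nTest: {test_description}'
examples = {'gpt': '<gpt usage example>', 'executor': '<executor example>'}

def create_system_message(task_description, test_description, system_definition_examples=('gpt',)):
    if system_definition_examples is None:
        return None
    message = template_system_message_base.format(
        task_description=task_description,
        test_description=test_description,
    )
    # Append only the example blocks that were requested.
    for key in system_definition_examples:
        if key in examples:
            message += f'\n{examples[key]}'
    return message

print(create_system_message('convert png to svg', 'assert the output starts with "<svg"', ['gpt', 'executor']))
```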
@@ -71,8 +71,8 @@ def generate(
         return
 
     from src.options.generate.generator import Generator
-    generator = Generator(description, test, model=model)
-    generator.generate(path)
+    generator = Generator(description, test, path=path, model=model)
+    generator.generate()
 
 @openai_api_key_needed
 @main.command()
@@ -1,19 +1,19 @@
 import os
 
-def listdir_no_hidden(path):
+def list_dirs_no_hidden(path):
     """
-    List all non-hidden files and directories in the specified path.
+    List all non-hidden directories in the specified path.
 
     :param path: str, optional (default is '.')
        The path to the directory you want to list files and directories from.
     :return: list
-       A list of filenames and directory names that are not hidden.
+       A list of directory names that are not hidden.
     """
-    return [entry for entry in os.listdir(path) if not entry.startswith('.')]
+    return [entry for entry in os.listdir(path) if not entry.startswith('.') and os.path.isdir(os.path.join(path, entry))]
 
 
 def get_latest_folder(path, max_fn=max):
-    return max_fn([os.path.join(path, f) for f in listdir_no_hidden(path) if os.path.isdir(os.path.join(path, f))])
+    return max_fn([os.path.join(path, f) for f in list_dirs_no_hidden(path)])
 
 def version_max_fn(path_list):
     version_list = [int(os.path.split(path)[-1].replace('v', '')) for path in path_list]
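The behavioural difference introduced by the rename is easy to verify in isolation; a small sketch (the helper body is copied from the new version above, the temp-directory setup is illustrative only):

```python
import os
import tempfile

def list_dirs_no_hidden(path):
    # New behaviour: keep only non-hidden directories, not files.
    return [entry for entry in os.listdir(path)
            if not entry.startswith('.') and os.path.isdir(os.path.join(path, entry))]

with tempfile.TemporaryDirectory() as tmp:
    os.mkdir(os.path.join(tmp, 'v1'))                  # kept
    open(os.path.join(tmp, 'notes.txt'), 'w').close()  # dropped (regular file)
    open(os.path.join(tmp, '.cache'), 'w').close()     # dropped (hidden)
    print(list_dirs_no_hidden(tmp))                    # ['v1']
```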
@@ -37,9 +37,9 @@ def validate_folder_is_correct(microservice_path):
        raise ValueError(f'Path {microservice_path} does not exist')
    if not os.path.isdir(microservice_path):
        raise ValueError(f'Path {microservice_path} is not a directory')
-    if len(listdir_no_hidden(microservice_path)) == 0:
+    if len(list_dirs_no_hidden(microservice_path)) == 0:
        raise ValueError(f'Path {microservice_path} is empty. Please generate a microservice first. Type `gptdeploy generate` for further instructions.')
-    if len(listdir_no_hidden(microservice_path)) > 1:
+    if len(list_dirs_no_hidden(microservice_path)) > 1:
        raise ValueError(f'Path {microservice_path} needs to contain only one folder. Please make sure that you only have one microservice in this folder.')
    latest_version_path = get_latest_version_path(microservice_path)
    required_files = [
@@ -2,6 +2,9 @@ import os
 import random
 import re
 import shutil
+from typing import List, Callable, Union
+
+from langchain import PromptTemplate
 from typing import List, Text, Optional
 
 from langchain import PromptTemplate
@@ -18,6 +21,10 @@ from src.options.generate.templates_system import template_system_message_base,
 from src.options.generate.templates_user import template_generate_microservice_name, \
     template_generate_possible_packages, \
     template_solve_code_issue, \
+    template_solve_pip_dependency_issue, template_is_dependency_issue, template_generate_playground, \
+    template_generate_executor, template_generate_test, template_generate_requirements, \
+    template_chain_of_thought, template_summarize_error, \
+    template_generate_apt_get_install, template_solve_apt_get_dependency_issue
-    template_solve_dependency_issue, template_is_dependency_issue, template_generate_playground, \
-    template_generate_executor, template_generate_test, template_generate_requirements, template_generate_dockerfile, \
-    template_chain_of_thought, template_summarize_error, template_generate_possible_packages_output_format_string
@@ -32,9 +39,10 @@ class TaskSpecification:
     test: Optional[Text]
 
 class Generator:
-    def __init__(self, task_description, test_description, model='gpt-4'):
+    def __init__(self, task_description, test_description, path, model='gpt-4'):
         self.gpt_session = gpt.GPTSession(task_description, test_description, model=model)
         self.microservice_specification = TaskSpecification(task=task_description, test=test_description)
+        self.microservice_root_path = path
 
     def extract_content_from_result(self, plain_text, file_name, match_single_block=False, can_contain_code_block=True):
         optional_line_break = '\n' if can_contain_code_block else ''  # the \n at the end makes sure that ``` within the generated code is not matched because it is not right before a line break
@@ -67,49 +75,70 @@ metas:
             all_microservice_files_string += f'**{file_name}**\n```{tag}\n{file_name_to_content[file_name]}\n```\n\n'
         return all_microservice_files_string.strip()
 
+    def get_default_parse_result_fn(self, files_names: List[str]):
+        def _default_parse_result_fn(x):
+            _parsed_results = {}
+            for _file_name in files_names:
+                _content = self.extract_content_from_result(x, _file_name, match_single_block=len(files_names)==1)
+                if _content != '':
+                    _parsed_results[_file_name] = _content
+            return _parsed_results
+        return _default_parse_result_fn
+
     def generate_and_persist_file(
             self,
-            section_title,
-            template,
-            destination_folder=None,
-            file_name=None,
+            section_title: str,
+            template: PromptTemplate,
+            destination_folder: str,
+            file_name_s: Union[str, List[str]] = None,
+            parse_result_fn: Callable = None,
+            system_definition_examples: List[str] = ['gpt', 'executor', 'docarray', 'client'],
             **template_kwargs
     ):
+        """This function generates file(s) using the given template and persists it/them in the given destination folder.
+        It also returns the generated content as a dictionary mapping file_name to its content.
+
+        Args:
+            section_title (str): The title of the section to be printed in the console.
+            template (PromptTemplate): The template to be used for generating the file(s).
+            destination_folder (str): The destination folder where the generated file(s) should be persisted.
+            file_name_s (Union[str, List[str]], optional): The name of the file(s) to be generated. Defaults to None.
+            parse_result_fn (Callable, optional): A function that parses the generated content and returns a dictionary
+                mapping file_name to its content. If no content could be extract, it returns an empty dictionary.
+                Defaults to None. If None, default parsing is used which uses the file_name to extract from the generated content.
+            system_definition_examples (List[str], optional): The system definition examples to be used for the conversation.
+                Defaults to ['gpt', 'executor', 'docarray', 'client'].
+            **template_kwargs: The keyword arguments to be passed to the template.
         """
-        Generates a file using the GPT-3 API and persists it to the destination folder if specified.
-        In case the content is not properly generated, it retries the generation.
-        It returns the generated content.
-        """
+        if parse_result_fn is None:
+            parse_result_fn = self.get_default_parse_result_fn([file_name_s] if isinstance(file_name_s, str) else file_name_s)
+
         print_colored('', f'\n\n############# {section_title} #############', 'blue')
+        system_introduction_message = self._create_system_message(self.microservice_specification.task, self.microservice_specification.test, system_definition_examples)
+        conversation = self.gpt_session.get_conversation(messages=[system_introduction_message])
+        template_kwargs = {k: v for k, v in template_kwargs.items() if k in template.input_variables}
+        if 'file_name' in template.input_variables:
+            template_kwargs['file_name'] = file_name_s
         content_raw = conversation.chat(
             template.format(
-                file_name=file_name,
                 **template_kwargs
             )
         )
-        content = self.extract_content_from_result(content_raw, file_name, match_single_block=True)
-        if content == '':
-            content_raw = conversation.chat(f'You must add the content for {file_name}.')
-            content = self.extract_content_from_result(
-                content_raw, file_name, match_single_block=True
-            )
-        if destination_folder:
-            persist_file(content, os.path.join(destination_folder, file_name))
+        content = parse_result_fn(content_raw)
+        if content == {}:
+            content_raw = conversation.chat('You must add the content' + (f' for {file_name_s}.' if file_name_s else ''))
+            content = parse_result_fn(content_raw)
+        for _file_name, _file_content in content.items():
+            persist_file(_file_content, os.path.join(destination_folder, _file_name))
         return content
 
     def generate_microservice(
             self,
-            path,
             microservice_name,
             packages,
             num_approach,
     ):
-        MICROSERVICE_FOLDER_v1 = get_microservice_path(path, microservice_name, packages, num_approach, 1)
+        MICROSERVICE_FOLDER_v1 = get_microservice_path(self.microservice_root_path, microservice_name, packages, num_approach, 1)
         os.makedirs(MICROSERVICE_FOLDER_v1)
 
         microservice_content = self.generate_and_persist_file(
@@ -122,53 +151,60 @@ metas:
             packages=packages,
             file_name_purpose=EXECUTOR_FILE_NAME,
             tag_name=EXECUTOR_FILE_TAG,
-            file_name=EXECUTOR_FILE_NAME,
-        )
+            file_name_s=EXECUTOR_FILE_NAME,
+        )[EXECUTOR_FILE_NAME]
 
         test_microservice_content = self.generate_and_persist_file(
             'Test Microservice',
             template_generate_test,
             MICROSERVICE_FOLDER_v1,
-            code_files_wrapped=self.files_to_string({'microservice.py': microservice_content}),
+            code_files_wrapped=self.files_to_string({EXECUTOR_FILE_NAME: microservice_content}),
             microservice_name=microservice_name,
             microservice_description=self.microservice_specification.task,
             test_description=self.microservice_specification.test,
             file_name_purpose=TEST_EXECUTOR_FILE_NAME,
             tag_name=TEST_EXECUTOR_FILE_TAG,
-            file_name=TEST_EXECUTOR_FILE_NAME,
-        )
+            file_name_s=TEST_EXECUTOR_FILE_NAME,
+        )[TEST_EXECUTOR_FILE_NAME]
 
         requirements_content = self.generate_and_persist_file(
             'Requirements',
             template_generate_requirements,
             MICROSERVICE_FOLDER_v1,
             code_files_wrapped=self.files_to_string({
-                'microservice.py': microservice_content,
-                'test_microservice.py': test_microservice_content,
+                EXECUTOR_FILE_NAME: microservice_content,
+                TEST_EXECUTOR_FILE_NAME: test_microservice_content,
             }),
             file_name_purpose=REQUIREMENTS_FILE_NAME,
-            file_name=REQUIREMENTS_FILE_NAME,
+            file_name_s=REQUIREMENTS_FILE_NAME,
             tag_name=REQUIREMENTS_FILE_TAG,
-        )
+        )[REQUIREMENTS_FILE_NAME]
 
         self.generate_and_persist_file(
-            'Dockerfile',
-            template_generate_dockerfile,
-            MICROSERVICE_FOLDER_v1,
-            code_files_wrapped=self.files_to_string({
-                'microservice.py': microservice_content,
-                'test_microservice.py': test_microservice_content,
-                'requirements.txt': requirements_content,
-            }),
-            file_name_purpose=DOCKER_FILE_NAME,
-            file_name=DOCKER_FILE_NAME,
-            tag_name=DOCKER_FILE_TAG,
+            section_title='Generate Dockerfile',
+            template=template_generate_apt_get_install,
+            destination_folder=MICROSERVICE_FOLDER_v1,
+            file_name_s=None,
+            parse_result_fn=self.parse_result_fn_dockerfile,
+            docker_file_wrapped=self.read_docker_template(),
+            requirements_file_wrapped=self.files_to_string({
+                REQUIREMENTS_FILE_NAME: requirements_content,
+            })
         )
 
         self.write_config_yml(microservice_name, MICROSERVICE_FOLDER_v1)
 
         print('\nFirst version of the microservice generated. Start iterating on it to make the tests pass...')
 
+    @staticmethod
+    def read_docker_template():
+        with open(os.path.join(os.path.dirname(__file__), 'static_files', 'microservice', 'Dockerfile'), 'r') as f:
+            return f.read()
+
+    def parse_result_fn_dockerfile(self, content_raw: str):
+        docker_file_template = self.read_docker_template()
+        return {DOCKER_FILE_NAME: docker_file_template.replace('{{apt_get_packages}}', '{apt_get_packages}').format(apt_get_packages=content_raw)}
+
     def generate_playground(self, microservice_name, microservice_path):
         print_colored('', '\n\n############# Playground #############', 'blue')
 
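The new call sites above index into the dictionary returned by `generate_and_persist_file`, which in turn relies on the default parse function to pull each named file out of the model reply. A simplified, standalone stand-in for that parsing step (the regex and the reply format below are illustrative; the repo's actual extraction lives in `extract_content_from_result`):

```python
import re

def parse_files(reply: str, file_names):
    # Map each requested file name to the content of its fenced block, if present.
    parsed = {}
    for name in file_names:
        match = re.search(rf'\*\*{re.escape(name)}\*\*\n```[^\n]*\n(.*?)```', reply, re.DOTALL)
        if match and match.group(1).strip():
            parsed[name] = match.group(1).strip()
    return parsed

reply = '**requirements.txt**\n```\njina~=3.14\npytest\n```\n'
print(parse_files(reply, ['requirements.txt']))
# {'requirements.txt': 'jina~=3.14\npytest'}
```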
@@ -219,13 +255,12 @@ metas:
         if not is_executor_in_hub(gateway_name):
             raise Exception(f'{microservice_name} not in hub. Hubble logs: {hubble_log}')
 
-
-    def debug_microservice(self, path, microservice_name, num_approach, packages):
+    def debug_microservice(self, microservice_name, num_approach, packages):
         for i in range(1, MAX_DEBUGGING_ITERATIONS):
             print('Debugging iteration', i)
             print('Trying to debug the microservice. Might take a while...')
-            previous_microservice_path = get_microservice_path(path, microservice_name, packages, num_approach, i)
-            next_microservice_path = get_microservice_path(path, microservice_name, packages, num_approach, i + 1)
+            previous_microservice_path = get_microservice_path(self.microservice_root_path, microservice_name, packages, num_approach, i)
+            next_microservice_path = get_microservice_path(self.microservice_root_path, microservice_name, packages, num_approach, i + 1)
             log_hubble = push_executor(previous_microservice_path)
             error = process_error_message(log_hubble)
             if error:
@@ -242,90 +277,113 @@ metas:
             else:
                 raise Exception(f'{microservice_name} not in hub. Hubble logs: {log_hubble}')
 
-
-        return get_microservice_path(path, microservice_name, packages, num_approach, i)
+        return get_microservice_path(self.microservice_root_path, microservice_name, packages, num_approach, i)
 
     def do_debug_iteration(self, error, next_microservice_path, previous_microservice_path):
         os.makedirs(next_microservice_path)
         file_name_to_content = get_all_microservice_files_with_content(previous_microservice_path)
 
-        summarized_error = self.summarize_error(error)
-        is_dependency_issue = self.is_dependency_issue(error, file_name_to_content['Dockerfile'])
-        if is_dependency_issue:
-            all_files_string = self.files_to_string({
-                key: val for key, val in file_name_to_content.items() if
-                key in ['requirements.txt', 'Dockerfile']
-            })
-            user_query = template_solve_dependency_issue.format(
-                summarized_error=summarized_error, all_files_string=all_files_string,
-            )
-        else:
-            user_query = template_solve_code_issue.format(
-                task_description=self.microservice_specification.task, test_description=self.microservice_specification.test,
-                summarized_error=summarized_error, all_files_string=self.files_to_string(file_name_to_content),
-            )
-        conversation = self.gpt_session.get_conversation()
-        returned_files_raw = conversation.chat(user_query)
-        for file_name, tag in FILE_AND_TAG_PAIRS:
-            updated_file = self.extract_content_from_result(returned_files_raw, file_name)
-            if updated_file and (not is_dependency_issue or file_name in ['requirements.txt', 'Dockerfile']):
-                file_name_to_content[file_name] = updated_file
-                print(f'Updated {file_name}')
-        for file_name, content in file_name_to_content.items():
-            persist_file(content, os.path.join(next_microservice_path, file_name))
 
+        summarized_error = self.summarize_error(error)
+        dock_req_string = self.files_to_string({
+            key: val for key, val in file_name_to_content.items() if
+            key in ['requirements.txt', 'Dockerfile']
+        })
+
+        is_apt_get_dependency_issue = self.is_dependency_issue(summarized_error, dock_req_string, 'apt-get')
+        if is_apt_get_dependency_issue:
+            self.generate_and_persist_file(
+                section_title='Debugging apt-get dependency issue',
+                template=template_solve_apt_get_dependency_issue,
+                destination_folder=next_microservice_path,
+                file_name_s=None,
+                parse_result_fn=self.parse_result_fn_dockerfile,
+                system_definition_examples=None,
+                summarized_error=summarized_error,
+                all_files_string=dock_req_string,
+            )
+            print('Dockerfile updated')
+        else:
+            is_pip_dependency_issue = self.is_dependency_issue(summarized_error, dock_req_string, 'PIP')
+            if is_pip_dependency_issue:
+                self.generate_and_persist_file(
+                    section_title='Debugging pip dependency issue',
+                    template=template_solve_pip_dependency_issue,
+                    destination_folder=next_microservice_path,
+                    file_name_s=REQUIREMENTS_FILE_NAME,
+                    summarized_error=summarized_error,
+                    all_files_string=dock_req_string,
+                )
+            else:
+                self.generate_and_persist_file(
+                    section_title='Debugging code issue',
+                    template=template_solve_code_issue,
+                    destination_folder=next_microservice_path,
+                    file_name_s=[EXECUTOR_FILE_NAME, TEST_EXECUTOR_FILE_NAME, REQUIREMENTS_FILE_NAME],
+                    summarized_error=summarized_error,
+                    task_description=self.microservice_specification.task,
+                    test_description=self.microservice_specification.test,
+                    all_files_string=self.files_to_string(file_name_to_content),
+                )
 
     class MaxDebugTimeReachedException(BaseException):
         pass
 
-    def is_dependency_issue(self, error, docker_file: str):
+    def is_dependency_issue(self, summarized_error, dock_req_string: str, package_manager: str):
         # a few heuristics to quickly jump ahead
-        if any([error_message in error for error_message in ['AttributeError', 'NameError', 'AssertionError']]):
+        if any([error_message in summarized_error for error_message in ['AttributeError', 'NameError', 'AssertionError']]):
             return False
+        if package_manager.lower() == 'pip' and any([em in summarized_error for em in ['ModuleNotFoundError', 'ImportError']]):
+            return True
+
-        print_colored('', 'Is it a dependency issue?', 'blue')
-        conversation = self.gpt_session.get_conversation([])
-        answer = conversation.chat(template_is_dependency_issue.format(error=error, docker_file=docker_file))
+        print_colored('', f'Is it a {package_manager} dependency issue?', 'blue')
+        conversation = self.gpt_session.get_conversation(None)
+        answer = conversation.chat(
+            template_is_dependency_issue.format(summarized_error=summarized_error, all_files_string=dock_req_string).replace('PACKAGE_MANAGER', package_manager)
+        )
         return 'yes' in answer.lower()
 
     def generate_microservice_name(self, description):
-        print_colored('', '\n\n############# What should be the name of the Microservice? #############', 'blue')
-        conversation = self.gpt_session.get_conversation()
-        name_raw = conversation.chat(template_generate_microservice_name.format(description=description))
-        name = self.extract_content_from_result(name_raw, 'name.txt')
+        name = self.generate_and_persist_file(
+            section_title='Generate microservice name',
+            template=template_generate_microservice_name,
+            destination_folder=self.microservice_root_path,
+            file_name_s='name.txt',
+            description=description
+        )['name.txt']
         return name
 
     def get_possible_packages(self):
-        print_colored('', '\n\n############# What packages to use? #############', 'blue')
         packages_csv_string = self.generate_and_persist_file(
-            'packages to use',
-            template_generate_possible_packages,
-            None,
-            file_name='packages.csv',
+            section_title='Generate possible packages',
+            template=template_generate_possible_packages,
+            destination_folder=self.microservice_root_path,
+            file_name_s='packages.csv',
+            system_definition_examples=['gpt'],
             description=self.microservice_specification.task
 
-        )
+        )['packages.csv']
         packages_list = [[pkg.strip() for pkg in packages_string.split(',')] for packages_string in packages_csv_string.split('\n')]
-        packages_list = packages_list[:NUM_IMPLEMENTATION_STRATEGIES]
-        return packages_list
-
-    def generate(self, microservice_path):
-        self.refine_specification()
-        generated_name = self.generate_microservice_name(self.microservice_specification.task)
-        microservice_name = f'{generated_name}{random.randint(0, 10_000_000)}'
-        packages_list = self.get_possible_packages()
+        packages_list = [
+            packages for packages in packages_list if len(set(packages).intersection(set(PROBLEMATIC_PACKAGES))) == 0
+        ]
+        packages_list = [
+            [package for package in packages if package not in UNNECESSARY_PACKAGES] for packages in packages_list
+        ]
+        packages_list = packages_list[:NUM_IMPLEMENTATION_STRATEGIES]
+        return packages_list
+
+    def generate(self):
+        os.makedirs(self.microservice_root_path)
+        generated_name = self.generate_microservice_name(self.microservice_specification.task)
+        microservice_name = f'{generated_name}{random.randint(0, 10_000_000)}'
+        packages_list = self.get_possible_packages()
         for num_approach, packages in enumerate(packages_list):
             try:
-                self.generate_microservice(microservice_path, microservice_name, packages, num_approach)
-                final_version_path = self.debug_microservice(
-                    microservice_path, microservice_name, num_approach, packages
-                )
+                self.generate_microservice(microservice_name, packages, num_approach)
+                final_version_path = self.debug_microservice(microservice_name, num_approach, packages)
                 self.generate_playground(microservice_name, final_version_path)
             except self.MaxDebugTimeReachedException:
                 print('Could not debug the Microservice with the approach:', packages)
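One detail worth calling out in the reworked `is_dependency_issue`: a single prompt is reused for both package managers by swapping in a `PACKAGE_MANAGER` placeholder after formatting. A standalone sketch with a plain string in place of the langchain template (illustrative only):

```python
# The repo formats template_is_dependency_issue and then substitutes the
# package manager name with a plain str.replace, as in the hunk above.
question = 'Is this a PACKAGE_MANAGER dependency installation failure? Answer with "yes" or "no".'

def specialise(question: str, package_manager: str) -> str:
    return question.replace('PACKAGE_MANAGER', package_manager)

print(specialise(question, 'apt-get'))
print(specialise(question, 'PIP'))
```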
@@ -336,14 +394,14 @@ metas:
                 continue
         print(f'''
 You can now run or deploy your microservice:
-gptdeploy run --path {microservice_path}
-gptdeploy deploy --path {microservice_path}
+gptdeploy run --path {self.microservice_root_path}
+gptdeploy deploy --path {self.microservice_root_path}
 '''
         )
         break
 
     def summarize_error(self, error):
-        conversation = self.gpt_session.get_conversation([])
+        conversation = self.gpt_session.get_conversation(None)
         error_summary = conversation.chat(template_summarize_error.format(error=error))
         return error_summary
 
src/options/generate/static_files/microservice/Dockerfile (new file, 15 lines)
@@ -0,0 +1,15 @@
+FROM jinaai/jina:3.14.1-py39-standard
+
+RUN apt-get update && apt-get install --no-install-recommends -y {{apt_get_packages}} && apt-get clean && rm -rf /var/lib/apt/lists/*
+
+## install requirements for the executor
+COPY requirements.txt .
+RUN pip install --compile -r requirements.txt
+
+# setup the workspace
+COPY . /workdir/
+WORKDIR /workdir
+
+RUN pytest test_microservice.py
+
+ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]
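How the `{{apt_get_packages}}` placeholder in this template gets filled is shown by `parse_result_fn_dockerfile` in the generator diff above; a hedged, standalone sketch of the same replace-then-format step (the file path and package list below are examples, not repo output):

```python
# Reads a Dockerfile template containing the literal token {{apt_get_packages}}
# and substitutes the whitespace-separated package list produced by the model.
with open('Dockerfile') as f:                 # illustrative path
    docker_file_template = f.read()

apt_get_packages = 'ffmpeg libsm6 libxext6'   # example model output

dockerfile = docker_file_template \
    .replace('{{apt_get_packages}}', '{apt_get_packages}') \
    .format(apt_get_packages=apt_get_packages)
print(dockerfile)
```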
@@ -176,22 +176,16 @@ All versions are fixed using ~=, ==, <, >, <=, >=. The package versions must not
 )
 
 
-template_generate_dockerfile = PromptTemplate.from_template(
-    general_guidelines_string + '''
-
-{code_files_wrapped}
-
-Write the Dockerfile that defines the environment with all necessary dependencies that the executor uses.
-It is important to make sure that all libs are installed that are required by the python packages.
-Usually libraries are installed with apt-get.
-Be aware that the machine the docker container is running on does not have a GPU - only CPU.
-Add the config.yml file to the Dockerfile.
-Note that the Dockerfile only has access to the files: microservice.py, requirements.txt, config.yml and test_microservice.py.
-The base image of the Dockerfile is FROM jinaai/jina:3.15.1-dev14-py39-standard.
-The entrypoint is ENTRYPOINT ["jina", "executor", "--uses", "config.yml"].
-Make sure the all files are in the /workdir.
-The Dockerfile runs the test during the build process.
-''' + not_allowed_docker_string + '\n' + template_code_wrapping_string
+template_generate_apt_get_install = PromptTemplate.from_template(
+    '''Given the following Dockerfile:
+
+{docker_file_wrapped}
+
+Name all packages which need to be installed via `apt-get install` in above Dockerfile (`{{apt_get_packages}}`) for the following requirements.txt file:
+
+{requirements_file_wrapped}
+
+Output them as a white space separated list:'''
 )
 
 
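Note the doubled braces in the new prompt: after templating they come out as single braces, so the model sees the literal placeholder name from the Dockerfile. Plain `str.format` shows the escaping (the repo uses langchain's `PromptTemplate`, whose default f-string templates behave the same way); the requirements content below is just an example:

```python
prompt = ('Name all packages which need to be installed via `apt-get install` '
          'in above Dockerfile (`{{apt_get_packages}}`) for the following requirements.txt file:\n'
          '{requirements_file_wrapped}')
print(prompt.format(requirements_file_wrapped='opencv-python~=4.7'))
# the doubled braces render as the literal {apt_get_packages} token
```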
@@ -207,17 +201,17 @@ template_is_dependency_issue = PromptTemplate.from_template(
 '''Your task is to assist in identifying the root cause of a Docker build error for a python application.
 The error message is as follows:
 
-{error}
+{summarized_error}
 
-The docker file is as follows:
+You are given the following files:
 
-{docker_file}
+{all_files_string}
 
-Is this a dependency installation failure? Answer with "yes" or "no".'''
+Is this a PACKAGE_MANAGER dependency installation failure? Answer with "yes" or "no".'''
 )
 
 
-template_solve_dependency_issue = PromptTemplate.from_template(
+template_solve_pip_dependency_issue = PromptTemplate.from_template(
 '''Your task is to provide guidance on how to solve an error that occurred during the Docker build process.
 Here is the summary of the error that occurred:
 {summarized_error}
@@ -225,21 +219,19 @@ Here is the summary of the error that occurred:
 To solve this error, you should:
 1. Suggest 3 to 5 possible solutions on how to solve it. You have no access to the documentation of the package.
 2. Decide for the best solution and explain it in detail.
-3. Write down the files that need to be changed, but not files that don't need to be changed.
+3. Write down how requirements.txt should look like to solve the error.
-For files that need to be changed, you must provide the complete file with the exact same syntax to wrap the code.
 Obey the following rules:
 ''' + not_allowed_docker_string + '''
 
 You are given the following files:
 
 {all_files_string}
 
-Output all the files that need change. Don't output files that don't need change.
+Output how the requirements.txt file should look like to solve the error.
 If you output a file, then write the complete file. Use the exact following syntax to wrap the code:
 
-**...**
+**requirements.txt**
 ```
-...code...
+...packages...
 ```
 
 Example:
@@ -252,6 +244,21 @@ jina==2.0.0
 )
 
 
+template_solve_apt_get_dependency_issue = PromptTemplate.from_template(
+'''Your task is to provide guidance on how to solve an error that occurred during the Docker build process.
+Here is the summary of the error that occurred:
+{summarized_error}
+
+You are given the following files:
+
+{all_files_string}
+
+To solve this error, you should determine the list of packages that need to be installed via `apt-get install` in the Dockerfile.
+Output them as a white space separated list:'''
+)
+
+
 template_solve_code_issue = PromptTemplate.from_template(
 '''General rules:
 ''' + not_allowed_executor_string + '''
@@ -275,7 +282,7 @@ To solve this error, you should:
 Obey the following rules:
 ''' + f'{not_allowed_executor_string}\n{not_allowed_docker_string}' + '''
 
-Output all the files that need change.
+Output all the files that need change. You must not change the Dockerfile.
 Don't output files that don't need change. If you output a file, then write the complete file.
 If you change microservice.py and it uses gpt_3_5_turbo_api, then you must keep the code for gpt_3_5_turbo_api in the microservice.py file.
 Use the exact following syntax to wrap the code:
 