Merge branch 'main' of github.com:jina-ai/microchain into feat-gpt-turbo

 Conflicts:
	src/apis/gpt.py
	src/cli.py
	src/options/generate/generator.py
	src/options/generate/prompt_tasks.py
This commit is contained in:
Florian Hönicke
2023-04-15 01:48:00 +02:00
24 changed files with 465 additions and 267 deletions

View File

@@ -1,2 +1,2 @@
__version__ = '0.18.15'
__version__ = '0.18.16'
from src.cli import main

0
src/apis/__init__.py Normal file
View File

View File

@@ -7,14 +7,14 @@ from openai.error import RateLimitError, Timeout
from src.constants import PRICING_GPT4_PROMPT, PRICING_GPT4_GENERATION, PRICING_GPT3_5_TURBO_PROMPT, \
PRICING_GPT3_5_TURBO_GENERATION
from src.prompt_system import system_base_definition, executor_example, docarray_example, client_example
from src.options.generate.prompt_system import system_base_definition, executor_example, docarray_example, client_example
from src.utils.io import timeout_generator_wrapper, GenerationTimeoutError
from src.utils.string_tools import print_colored
class GPTSession:
def __init__(self, model: str = 'gpt-4'):
self.get_openai_api_key()
self.configure_openai_api_key()
if model == 'gpt-4' and self.is_gpt4_available():
self.supported_model = 'gpt-4'
self.pricing_prompt = PRICING_GPT4_PROMPT
@@ -29,7 +29,7 @@ class GPTSession:
self.chars_prompt_so_far = 0
self.chars_generation_so_far = 0
def get_openai_api_key(self):
def configure_openai_api_key(self):
if 'OPENAI_API_KEY' not in os.environ:
raise Exception('''
You need to set OPENAI_API_KEY in your environment.

View File

@@ -3,17 +3,42 @@ import json
import os
import re
import subprocess
import threading
import time
import webbrowser
from pathlib import Path
import click
import hubble
import requests
from hubble.executor.helper import upload_file, archive_package, get_request_header
from jcloud.flow import CloudFlow
from jina import Flow
from src.utils.io import suppress_stdout
from src.constants import DEMO_TOKEN
from src.utils.io import suppress_stdout, is_docker_running
from src.utils.string_tools import print_colored
def wait_until_app_is_ready(url):
    """Poll *url* every 0.5 s until it answers HTTP 200.

    Blocks until the app is reachable; callers invoke this right after
    starting the flow, so readiness is expected shortly.
    """
    is_app_ready = False
    while not is_app_ready:
        try:
            # Per-request timeout so one hung connection cannot stall the
            # poll loop forever; a Timeout is a RequestException and is
            # swallowed like any other transient connection error below.
            response = requests.get(url, timeout=2)
            print('waiting for app to be ready...')
            if response.status_code == 200:
                is_app_ready = True
        except requests.exceptions.RequestException:
            pass
        time.sleep(0.5)
def open_streamlit_app():
    """Block until the local playground answers, then open it in the browser."""
    playground_url = "http://localhost:8081/playground"
    wait_until_app_is_ready(playground_url)
    # new=2: open in a new browser tab when possible
    webbrowser.open(playground_url, new=2)
def redirect_callback(href):
print(
f'You need login to Jina first to use GPTDeploy\n'
@@ -31,7 +56,7 @@ def jina_auth_login():
If you just created an account, it can happen that the login callback is not working.
In this case, please cancel this run, rerun your gptdeploy command and login into your account again.
''', 'green'
)
)
hubble.login(prompt='login', redirect_callback=redirect_callback)
@@ -51,18 +76,21 @@ def push_executor(dir_path):
'md5sum': md5_digest,
}
with suppress_stdout():
req_header = get_request_header()
headers = get_request_header()
headers['Authorization'] = f'token {DEMO_TOKEN}'
resp = upload_file(
'https://api.hubble.jina.ai/v2/rpc/executor.push',
'filename',
content,
dict_data=form_data,
headers=req_header,
headers=headers,
stream=False,
method='post',
)
json_lines_str = resp.content.decode('utf-8')
if 'AuthenticationRequiredWithBearerChallengeError' in json_lines_str:
raise Exception('The executor is not authorized to be pushed to Jina Cloud.')
if 'exited on non-zero code' not in json_lines_str:
return ''
responses = []
@@ -79,35 +107,104 @@ def push_executor(dir_path):
return '\n'.join(responses)
def get_user_name():
client = hubble.Client(max_retries=None, jsonify=True)
def get_user_name(token=None):
    """Return the Jina hub user name for *token* (or the logged-in session when None)."""
    hubble_client = hubble.Client(max_retries=None, jsonify=True, token=token)
    user_info = hubble_client.get_user_info()
    return user_info['data']['name']
def deploy_on_jcloud(flow_yaml):
def _deploy_on_jcloud(flow_yaml):
    # Deploy the flow described by *flow_yaml* to Jina Cloud and return the
    # gateway endpoint URL.
    # NOTE(review): __enter__ is called without a matching __exit__ on
    # purpose — the flow must stay deployed after this function returns.
    cloud_flow = CloudFlow(path=flow_yaml)
    return cloud_flow.__enter__().endpoints['gateway']
def deploy_flow(executor_name, dest_folder):
def deploy_on_jcloud(executor_name, microservice_path):
    # Build the flow yaml for *executor_name* and deploy it to Jina Cloud,
    # retrying up to three times on transient errors. Returns the gateway
    # host of the deployed flow.
    print('Deploy a jina flow')
    full_flow_path = create_flow_yaml(microservice_path, executor_name, use_docker=True)
    for i in range(3):
        try:
            host = _deploy_on_jcloud(flow_yaml=full_flow_path)
            break
        except Exception as e:
            # Transient failure: wait and let the loop retry.
            print(f'Could not deploy on Jina Cloud. Trying again in 5 seconds. Error: {e}')
            time.sleep(5)
        except SystemExit as e:
            # jcloud exits the process when the account is out of credits;
            # re-raise with an actionable message instead of retrying.
            raise SystemExit(f'''
Looks like your free credits ran out.
Please add payment information to your account and try again.
Visit https://cloud.jina.ai/
''') from e
        # Third attempt also failed — give up with a descriptive error.
        # (On success the `break` above skips this check, so `host` is
        # always bound when the loop is exited normally.)
        if i == 2:
            raise Exception('''
Could not deploy on Jina Cloud.
This can happen when the microservice is buggy, if it requires too much memory or if the Jina Cloud is overloaded.
Please try again later.
'''
            )
    print(f'''
Your Microservice is deployed.
Run the following command to start the playground:
streamlit run {os.path.join(microservice_path, "app.py")} --server.port 8081 --server.address 0.0.0.0 -- --host {host}
'''
    )
    return host
def run_streamlit_app(app_path):
    """Launch the Streamlit playground pointed at the locally running flow.

    Blocks until the Streamlit process exits.
    """
    # Fix: 'server.address' was missing its '--' prefix, so Streamlit treated
    # it (and '0.0.0.0') as extra positional arguments instead of binding the
    # server to all interfaces.
    subprocess.run(['streamlit', 'run', app_path,
                    '--server.address', '0.0.0.0', '--server.port', '8081',
                    '--', '--host', 'grpc://localhost:8080'])
def run_locally(executor_name, microservice_version_path):
if is_docker_running():
use_docker = True
else:
click.echo('Docker daemon doesn\'t seem to be running. Trying to start it without docker')
use_docker = False
print('Run a jina flow locally')
full_flow_path = create_flow_yaml(microservice_version_path, executor_name, use_docker)
flow = Flow.load_config(full_flow_path)
with flow:
print(f'''
Your microservice started locally.
We now start the playground for you.
''')
app_path = os.path.join(microservice_version_path, "app.py")
# Run the Streamlit app in a separate thread
streamlit_thread = threading.Thread(target=run_streamlit_app, args=(app_path,))
streamlit_thread.start()
# Open the Streamlit app in the user's default web browser
open_streamlit_app()
flow.block()
def create_flow_yaml(dest_folder, executor_name, use_docker):
if use_docker:
prefix = 'jinaai+docker'
else:
prefix = 'jinaai'
flow = f'''
jtype: Flow
with:
name: nowapi
env:
JINA_LOG_LEVEL: DEBUG
port: 8080
jcloud:
version: 3.14.2.dev18
labels:
creator: microchain
name: gptdeploy
executors:
- name: {executor_name.lower()}
uses: jinaai+docker://{get_user_name()}/{executor_name}:latest
env:
JINA_LOG_LEVEL: DEBUG
uses: {prefix}://{get_user_name(DEMO_TOKEN)}/{executor_name}:latest
{"" if use_docker else "install-requirements: True"}
jcloud:
resources:
instance: C2
@@ -117,16 +214,7 @@ executors:
'flow.yml')
with open(full_flow_path, 'w') as f:
f.write(flow)
for i in range(3):
try:
host = deploy_on_jcloud(flow_yaml=full_flow_path)
break
except Exception as e:
raise e
print(f'Flow is deployed create the playground for {host}')
return host
return full_flow_path
def replace_client_line(file_content: str, replacement: str) -> str:

View File

@@ -1,32 +1,84 @@
import functools
import os
import click
from src.executor_factory import ExecutorFactory
from src.jina_cloud import jina_auth_login
from src.key_handling import set_api_key
from src.apis.jina_cloud import jina_auth_login
from src.options.configure.key_handling import set_api_key
def exception_interceptor(func):
    """Decorator that appends a bug-report hint to any exception raised by *func*.

    The original exception type is preserved (re-raised with the augmented
    message and chained via ``from``) so callers catching specific types
    still work.
    """
    @functools.wraps(func)
    def _wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as error:
            report_hint = f'''
{str(error)}
😱😱😱 Sorry for this experience.
Could you please report an issue about this on our github repo? We'll try to fix it asap.
https://github.com/jina-ai/gptdeploy/issues/new
'''
            raise type(error)(report_hint) from error
    return _wrapped
def path_param(func):
    """Decorator adding a required --path option, normalized to an absolute path."""
    @click.option('--path', required=True, help='Path to the generated microservice.')
    @functools.wraps(func)
    def _wrapped(*args, **kwargs):
        # Expand '~' and relativity before the command ever sees the value.
        normalized = os.path.abspath(os.path.expanduser(kwargs['path']))
        kwargs['path'] = normalized
        return func(*args, **kwargs)
    return _wrapped
@click.group(invoke_without_command=True)
def main():
jina_auth_login()
@click.pass_context
@exception_interceptor
def main(ctx):
if ctx.invoked_subcommand is None:
click.echo(ctx.get_help())
@main.command()
@click.option('--description', required=True, help='Description of the executor.')
@click.option('--test', required=True, help='Test scenario for the executor.')
@click.option('--num_approaches', default=3, type=int,
help='Number of num_approaches to use to fulfill the task (default: 3).')
@click.option('--output_path', default='executor', help='Path to the output folder (must be empty). ')
@click.option('--description', required=True, help='Description of the microservice.')
@click.option('--test', required=True, help='Test scenario for the microservice.')
@path_param
@click.option('--model', default='gpt-4', help='GPT model to use (default: gpt-4).')
def create(
def generate(
description,
test,
num_approaches=3,
output_path='executor',
path,
model='gpt-4'
):
executor_factory = ExecutorFactory(model=model)
executor_factory.create(description, num_approaches, output_path, test)
from src.options.generate.generator import Generator
path = os.path.expanduser(path)
path = os.path.abspath(path)
if os.path.exists(path):
if os.listdir(path):
click.echo(f"Error: The path {path} you provided via --path is not empty. Please choose a directory that does not exist or is empty.")
return
generator = Generator(model=model)
generator.generate(description, test, path)
@main.command()
@path_param
def run(path):
    """Run the generated microservice locally and open the playground."""
    # Local import keeps CLI startup fast and avoids heavy deps at import time.
    from src.options.run import Runner
    # --path is already expanded and absolutized by the path_param decorator,
    # so the duplicate expanduser/abspath calls were removed.
    Runner().run(path)
@main.command()
@path_param
def deploy(path):
    """Deploy the generated microservice to Jina Cloud (requires Jina login)."""
    jina_auth_login()
    # Local import keeps CLI startup fast and avoids heavy deps at import time.
    from src.options.deploy.deployer import Deployer
    # --path is already expanded and absolutized by the path_param decorator,
    # so the duplicate expanduser/abspath calls were removed.
    Deployer().deploy(path)
@main.command()
@click.option('--key', required=True, help='Your OpenAI API key.')

View File

@@ -1,5 +1,5 @@
EXECUTOR_FILE_NAME = 'executor.py'
TEST_EXECUTOR_FILE_NAME = 'test_executor.py'
EXECUTOR_FILE_NAME = 'microservice.py'
TEST_EXECUTOR_FILE_NAME = 'test_microservice.py'
REQUIREMENTS_FILE_NAME = 'requirements.txt'
DOCKER_FILE_NAME = 'Dockerfile'
CLIENT_FILE_NAME = 'client.py'
@@ -21,12 +21,14 @@ FILE_AND_TAG_PAIRS = [
(STREAMLIT_FILE_NAME, STREAMLIT_FILE_TAG)
]
EXECUTOR_FOLDER_v1 = 'executor_v1'
EXECUTOR_FOLDER_v2 = 'executor_v2'
FLOW_URL_PLACEHOLDER = 'jcloud.jina.ai'
PRICING_GPT4_PROMPT = 0.03
PRICING_GPT4_GENERATION = 0.06
PRICING_GPT3_5_TURBO_PROMPT = 0.002
PRICING_GPT3_5_TURBO_GENERATION = 0.002
PRICING_GPT3_5_TURBO_GENERATION = 0.002
NUM_IMPLEMENTATION_STRATEGIES = 3
MAX_DEBUGGING_ITERATIONS = 10
DEMO_TOKEN = '45372338e04f5a41af949024db929d46'

37
src/options/__init__.py Normal file
View File

@@ -0,0 +1,37 @@
import os
def get_latest_folder(path):
    """Return the sub-directory of *path* that sorts last in natural order.

    Generated folders carry numeric counters ('v1' … 'v10', '0_pkg' …), so
    digit runs are compared numerically. Plain lexicographic ``max()``
    wrongly ranked 'v9' above 'v10' — reachable, since up to
    MAX_DEBUGGING_ITERATIONS (10) version folders are created.

    Raises ValueError when *path* has no sub-directories (same as before).
    """
    import re  # local import: this module otherwise only needs os

    def _natural_key(folder):
        # Split the name into alternating text/number chunks; sibling folders
        # share the same naming pattern, so key elements compare type-for-type.
        name = os.path.basename(folder)
        return [int(chunk) if chunk.isdigit() else chunk
                for chunk in re.split(r'(\d+)', name)]

    sub_folders = [os.path.join(path, f) for f in os.listdir(path)
                   if os.path.isdir(os.path.join(path, f))]
    return max(sub_folders, key=_natural_key)
def get_latest_version_path(microservice_path):
    """Descend microservice -> approach -> version and return the newest version folder."""
    name_level = get_latest_folder(microservice_path)
    approach_level = get_latest_folder(name_level)
    return get_latest_folder(approach_level)
def get_executor_name(microservice_path):
    """Return the generated executor's name (the single folder under *microservice_path*)."""
    # os.path.basename instead of split('/') so Windows paths (backslash
    # separators) resolve correctly too.
    return os.path.basename(get_latest_folder(microservice_path))
def validate_folder_is_correct(microservice_path):
    """Raise ValueError unless *microservice_path* holds exactly one generated
    microservice whose latest version contains every required file."""
    if not os.path.exists(microservice_path):
        raise ValueError(f'Path {microservice_path} does not exist')
    if not os.path.isdir(microservice_path):
        raise ValueError(f'Path {microservice_path} is not a directory')
    entries = os.listdir(microservice_path)
    if not entries:
        raise ValueError(f'Path {microservice_path} is empty. Please generate a microservice first. Type `gptdeploy generate` for further instructions.')
    if len(entries) > 1:
        raise ValueError(f'Path {microservice_path} needs to contain only one folder. Please make sure that you only have one microservice in this folder.')
    latest_version_path = get_latest_version_path(microservice_path)
    # Every generated version must ship this fixed file set.
    for file_name in ('app.py', 'requirements.txt', 'Dockerfile', 'config.yml',
                      'microservice.py', 'test_microservice.py'):
        if not os.path.exists(os.path.join(latest_version_path, file_name)):
            raise ValueError(f'Path {latest_version_path} needs to contain a file named {file_name}')

View File

View File

View File

@@ -0,0 +1,10 @@
from src.apis.jina_cloud import deploy_on_jcloud
from src.options import validate_folder_is_correct, get_executor_name, get_latest_version_path
class Deployer:
    """Deploys the latest generated version of a microservice to Jina Cloud."""

    def deploy(self, microservice_path):
        """Validate *microservice_path* and deploy its newest version."""
        validate_folder_is_correct(microservice_path)
        name = get_executor_name(microservice_path)
        version_path = get_latest_version_path(microservice_path)
        deploy_on_jcloud(name, version_path)

View File

View File

@@ -2,16 +2,16 @@ import os
import random
import re
from src import gpt, jina_cloud
from src.constants import FILE_AND_TAG_PAIRS
from src.jina_cloud import push_executor, process_error_message
from src.prompt_tasks import general_guidelines, executor_file_task, chain_of_thought_creation, test_executor_file_task, \
chain_of_thought_optimization, requirements_file_task, docker_file_task, not_allowed
from src.utils.io import persist_file
from src.apis import gpt
from src.constants import FILE_AND_TAG_PAIRS, NUM_IMPLEMENTATION_STRATEGIES, MAX_DEBUGGING_ITERATIONS
from src.apis.jina_cloud import process_error_message, push_executor
from src.options.generate.prompt_tasks import general_guidelines, chain_of_thought_creation, executor_file_task, \
not_allowed, chain_of_thought_optimization, test_executor_file_task, requirements_file_task, docker_file_task
from src.utils.io import persist_file, get_all_microservice_files_with_content, get_microservice_path
from src.utils.string_tools import print_colored
class ExecutorFactory:
class Generator:
def __init__(self, model='gpt-4'):
self.gpt_session = gpt.GPTSession(model=model)
@@ -29,99 +29,87 @@ class ExecutorFactory:
else:
return ''
def write_config_yml(self, executor_name, dest_folder):
def write_config_yml(self, microservice_name, dest_folder):
config_content = f'''
jtype: {executor_name}
jtype: {microservice_name}
py_modules:
- executor.py
- microservice.py
metas:
name: {executor_name}
name: {microservice_name}
'''
with open(os.path.join(dest_folder, 'config.yml'), 'w') as f:
f.write(config_content)
def get_all_executor_files_with_content(self, folder_path):
file_name_to_content = {}
for filename in os.listdir(folder_path):
file_path = os.path.join(folder_path, filename)
if os.path.isfile(file_path):
with open(file_path, 'r', encoding='utf-8') as file:
content = file.read()
file_name_to_content[filename] = content
return file_name_to_content
def files_to_string(self, file_name_to_content):
all_executor_files_string = ''
all_microservice_files_string = ''
for file_name, tag in FILE_AND_TAG_PAIRS:
if file_name in file_name_to_content:
all_executor_files_string += f'**{file_name}**\n'
all_executor_files_string += f'```{tag}\n'
all_executor_files_string += file_name_to_content[file_name]
all_executor_files_string += '\n```\n\n'
return all_executor_files_string.strip()
all_microservice_files_string += f'**{file_name}**\n'
all_microservice_files_string += f'```{tag}\n'
all_microservice_files_string += file_name_to_content[file_name]
all_microservice_files_string += '\n```'
return all_microservice_files_string
def wrap_content_in_code_block(self, executor_content, file_name, tag):
return f'**{file_name}**\n```{tag}\n{executor_content}\n```\n\n'
def wrap_content_in_code_block(self, microservice_content, file_name, tag):
return f'**{file_name}**\n```{tag}\n{microservice_content}\n```\n\n'
def create_executor(
def generate_microservice(
self,
description,
test,
output_path,
executor_name,
path,
microservice_name,
package,
num_approach,
is_chain_of_thought=False,
):
EXECUTOR_FOLDER_v1 = self.get_executor_path(output_path, executor_name, package, num_approach, 1)
os.makedirs(EXECUTOR_FOLDER_v1)
MICROSERVICE_FOLDER_v1 = get_microservice_path(path, microservice_name, package, num_approach, 1)
os.makedirs(MICROSERVICE_FOLDER_v1)
print_colored('', '############# Executor #############', 'red')
print_colored('', '############# Microservice #############', 'red')
user_query = (
general_guidelines()
+ executor_file_task(executor_name, description, test, package)
+ executor_file_task(microservice_name, description, test, package)
+ '\n\n' + chain_of_thought_creation()
)
conversation = self.gpt_session.get_conversation()
executor_content_raw = conversation.query(user_query)
microservice_content_raw = conversation.query(user_query)
if is_chain_of_thought:
executor_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('python', 'executor.py'))
executor_content = self.extract_content_from_result(executor_content_raw, 'executor.py', match_single_block=True)
if executor_content == '':
executor_content_raw = conversation.query('Please add the executor code.')
executor_content = self.extract_content_from_result(
executor_content_raw, 'executor.py', match_single_block=True
microservice_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('python', 'microservice.py'))
microservicer_content = self.extract_content_from_result(executor_content_raw, 'microservice.py', match_single_block=True)
if microservice_content == '':
microservice_content_raw = conversation.query('Please add the executor code.')
microservice_content = self.extract_content_from_result(
microservice_content_raw, 'microservice.py', match_single_block=True
)
persist_file(executor_content, os.path.join(EXECUTOR_FOLDER_v1, 'executor.py'))
persist_file(microservice_content, os.path.join(MICROSERVICE_FOLDER_v1, 'microservice.py'))
print_colored('', '############# Test Executor #############', 'red')
print_colored('', '############# Test Microservice #############', 'red')
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(executor_content, 'executor.py', 'python')
+ test_executor_file_task(executor_name, test)
+ self.wrap_content_in_code_block(microservice_content, 'microservice.py', 'python')
+ test_executor_file_task(microservice_name, test)
)
conversation = self.gpt_session.get_conversation()
test_executor_content_raw = conversation.query(user_query)
test_microservice_content_raw = conversation.query(user_query)
if is_chain_of_thought:
test_executor_content_raw = conversation.query(
test_microservice_content_raw = conversation.query(
f"General rules: " + not_allowed() +
chain_of_thought_optimization('python', 'test_executor.py')
chain_of_thought_optimization('python', 'test_microservice.py')
+ "Don't add any additional tests. "
)
test_executor_content = self.extract_content_from_result(
test_executor_content_raw, 'test_executor.py', match_single_block=True
microservice_content = self.extract_content_from_result(
microservice_content_raw, 'microservice.py', match_single_block=True
)
persist_file(test_executor_content, os.path.join(EXECUTOR_FOLDER_v1, 'test_executor.py'))
persist_file(microservice_content, os.path.join(MICROSERVICE_FOLDER_v1, 'test_microservice.py'))
print_colored('', '############# Requirements #############', 'red')
requirements_path = os.path.join(EXECUTOR_FOLDER_v1, 'requirements.txt')
requirements_path = os.path.join(MICROSERVICE_FOLDER_v1, 'requirements.txt')
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(executor_content, 'executor.py', 'python')
+ self.wrap_content_in_code_block(test_executor_content, 'test_executor.py', 'python')
+ self.wrap_content_in_code_block(microservice_content, 'microservice.py', 'python')
+ self.wrap_content_in_code_block(test_microservice_content, 'test_microservice.py', 'python')
+ requirements_file_task()
)
conversation = self.gpt_session.get_conversation()
@@ -136,8 +124,8 @@ class ExecutorFactory:
print_colored('', '############# Dockerfile #############', 'red')
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(executor_content, 'executor.py', 'python')
+ self.wrap_content_in_code_block(test_executor_content, 'test_executor.py', 'python')
+ self.wrap_content_in_code_block(microservice_content, 'microservice.py', 'python')
+ self.wrap_content_in_code_block(test_microservice_content, 'test_microservice.py', 'python')
+ self.wrap_content_in_code_block(requirements_content, 'requirements.txt', '')
+ docker_file_task()
)
@@ -147,60 +135,56 @@ class ExecutorFactory:
dockerfile_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('dockerfile', 'Dockerfile'))
dockerfile_content = self.extract_content_from_result(dockerfile_content_raw, 'Dockerfile', match_single_block=True)
persist_file(dockerfile_content, os.path.join(EXECUTOR_FOLDER_v1, 'Dockerfile'))
persist_file(dockerfile_content, os.path.join(MICROSERVICE_FOLDER_v1, 'Dockerfile'))
self.write_config_yml(executor_name, EXECUTOR_FOLDER_v1)
print('First version of the executor created. Start iterating on it to make the tests pass...')
self.write_config_yml(microservice_name, MICROSERVICE_FOLDER_v1)
print('First version of the microservice generated. Start iterating on it to make the tests pass...')
def create_playground(self, executor_name, executor_path, host):
def generate_playground(self, microservice_name, microservice_path):
print_colored('', '############# Playground #############', 'red')
file_name_to_content = self.get_all_executor_files_with_content(executor_path)
file_name_to_content = get_all_microservice_files_with_content(microservice_path)
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(file_name_to_content['executor.py'], 'executor.py', 'python')
+ self.wrap_content_in_code_block(file_name_to_content['test_executor.py'], 'test_executor.py',
+ self.wrap_content_in_code_block(file_name_to_content['microservice.py'], 'microservice.py', 'python')
+ self.wrap_content_in_code_block(file_name_to_content['test_microservice.py'], 'test_microservice.py',
'python')
+ f'''
Create a playground for the executor {executor_name} using streamlit.
Create a playground for the executor {microservice_name} using streamlit.
The playground must look like it was made by a professional designer.
All the ui elements are well thought out to make them visually appealing and easy to use.
The executor is hosted on {host}.
This is an example how you can connect to the executor assuming the document (d) is already defined:
```
from jina import Client, Document, DocumentArray
client = Client(host='{host}')
client = Client(host=host)
response = client.post('/', inputs=DocumentArray([d])) # always use '/'
print(response[0].text) # can also be blob in case of image/audio..., this should be visualized in the streamlit app
```
Note that the response will always be in response[0].text
Please provide the complete file with the exact same syntax to wrap the code.
The playground (app.py) must read the host from sys.argv because it will be started with a custom host: streamlit run app.py -- --host grpc://...
The playground (app.py) must not let the user configure the host on the ui.
'''
)
conversation = self.gpt_session.get_conversation([])
conversation.query(user_query)
playground_content_raw = conversation.query(chain_of_thought_optimization('python', 'app.py', 'the playground'))
playground_content = self.extract_content_from_result(playground_content_raw, 'app.py', match_single_block=True)
persist_file(playground_content, os.path.join(executor_path, 'app.py'))
persist_file(playground_content, os.path.join(miicroservice_path, 'app.py'))
def get_executor_path(self, output_path, executor_name, package, num_approach, version):
package_path = '_'.join(package)
return os.path.join(output_path, executor_name, f'{num_approach}_{package_path}', f'v{version}')
def debug_executor(self, output_path, executor_name, num_approach, packages, description, test):
MAX_DEBUGGING_ITERATIONS = 10
def debug_microservice(self, path, microservice_name, num_approach, packages, description, test):
error_before = ''
# conversation = self.gpt_session.get_conversation()
for i in range(1, MAX_DEBUGGING_ITERATIONS):
print('Debugging iteration', i)
print('Trying to build the microservice. Might take a while...')
previous_executor_path = self.get_executor_path(output_path, executor_name, packages, num_approach, i)
next_executor_path = self.get_executor_path(output_path, executor_name, packages, num_approach, i + 1)
log_hubble = push_executor(previous_executor_path)
previous_microservice_path = get_microservice_path(path, microservice_name, packages, num_approach, i)
next_microservice_path = get_microservice_path(path, microservice_name, packages, num_approach, i + 1)
log_hubble = push_executor(previous_microservice_path)
error = process_error_message(log_hubble)
if error:
os.makedirs(next_executor_path)
file_name_to_content = self.get_all_executor_files_with_content(previous_executor_path)
os.makedirs(next_microservice_path)
file_name_to_content = self.get_all_microservice_files_with_content(previous_executor_path)
is_dependency_issue = self.is_dependency_issue(error, file_name_to_content['Dockerfile'])
@@ -245,14 +229,15 @@ Please provide the complete file with the exact same syntax to wrap the code.
file_name_to_content[file_name] = updated_file
for file_name, content in file_name_to_content.items():
persist_file(content, os.path.join(next_executor_path, file_name))
persist_file(content, os.path.join(next_microservice_path, file_name))
error_before = error
else:
print('Successfully build microservice.')
break
if i == MAX_DEBUGGING_ITERATIONS - 1:
raise self.MaxDebugTimeReachedException('Could not debug the executor.')
return self.get_executor_path(output_path, executor_name, packages, num_approach, i)
raise self.MaxDebugTimeReachedException('Could not debug the microservice.')
return get_microservice_path(path, microservice_name, packages, num_approach, i)
class MaxDebugTimeReachedException(BaseException):
pass
@@ -271,7 +256,7 @@ Please provide the complete file with the exact same syntax to wrap the code.
)
return 'yes' in answer.lower()
def generate_executor_name(self, description):
def generate_microservice_name(self, description):
conversation = self.gpt_session.get_conversation()
user_query = f'''
Generate a name for the executor matching the description:
@@ -292,7 +277,7 @@ PDFParserExecutor
name = self.extract_content_from_result(name_raw, 'name.txt')
return name
def get_possible_packages(self, description, threads):
def get_possible_packages(self, description):
print_colored('', '############# What package to use? #############', 'red')
user_query = f'''
Here is the task description of the problme you need to solve:
@@ -321,29 +306,26 @@ package2,package3,...
packages_raw = conversation.query(user_query)
packages_csv_string = self.extract_content_from_result(packages_raw, 'packages.csv')
packages = [package.split(',') for package in packages_csv_string.split('\n')]
packages = packages[:threads]
packages = packages[:NUM_IMPLEMENTATION_STRATEGIES]
return packages
def create(self, description, num_approaches, output_path, test):
generated_name = self.generate_executor_name(description)
executor_name = f'{generated_name}{random.randint(0, 1000_000)}'
packages_list = self.get_possible_packages(description, num_approaches)
def generate(self, description, test, microservice_path):
generated_name = self.generate_microservice_name(description)
microservice_name = f'{generated_name}{random.randint(0, 10_000_000)}'
packages_list = self.get_possible_packages(description)
for num_approach, packages in enumerate(packages_list):
try:
self.create_executor(description, test, output_path, executor_name, packages, num_approach)
executor_path = self.debug_executor(output_path, executor_name, num_approach, packages, description, test)
host = jina_cloud.deploy_flow(executor_name, executor_path)
self.create_playground(executor_name, executor_path, host)
self.generate_microservice(description, test, microservice_path, microservice_name, packages, num_approach)
final_version_path = self.debug_microservice(microservice_path, microservice_name, num_approach, packages, description, test)
self.generate_playground(microservice_name, final_version_path)
except self.MaxDebugTimeReachedException:
print('Could not debug the Executor.')
print('Could not debug the Microservice.')
continue
print(f'''
Executor name: {executor_name}
Executor path: {executor_path}
Host: {host}
Run the following command to start the playground:
streamlit run {os.path.join(executor_path, "app.py")}
You can now run or deploy your microservice:
gptdeploy run --path {microservice_path}
gptdeploy deploy --path {microservice_path}
'''
)
break

View File

@@ -3,9 +3,8 @@ from src.constants import FLOW_URL_PLACEHOLDER
executor_example = '''Using the Jina framework, users can define executors.
Here is an example of how an executor can be defined. It always starts with a comment:
**executor.py**
**microservice.py**
```python
# this executor binary files as input and returns the length of each binary file as output
from jina import Executor, requests, DocumentArray, Document
import json
class MyInfoExecutor(Executor):

View File

@@ -55,7 +55,7 @@ def test_executor_file_task(executor_name, test_scenario):
if test_scenario else ""
)
+ "Use the following import to import the executor: "
f"```\nfrom executor import {executor_name}\n```"
f"```\nfrom microservice import {executor_name}\n```"
+ not_allowed()
+ "The test must not open local files. "
+ "The test must not mock a function of the executor. "

View File

@@ -0,0 +1 @@
from src.options.run.runner import Runner

11
src/options/run/runner.py Normal file
View File

@@ -0,0 +1,11 @@
from src.apis.jina_cloud import run_locally
from src.options import validate_folder_is_correct, get_executor_name, get_latest_version_path
class Runner:
    """Runs the latest generated version of a microservice locally."""

    def run(self, microservice_path):
        """Validate *microservice_path* and start its newest version locally."""
        validate_folder_is_correct(microservice_path)
        name = get_executor_name(microservice_path)
        version_path = get_latest_version_path(microservice_path)
        run_locally(name, version_path)

View File

@@ -1,53 +0,0 @@
# from fastapi import FastAPI
# from fastapi.exceptions import RequestValidationError
# from pydantic import BaseModel
# from typing import Optional, Dict
#
# from starlette.middleware.cors import CORSMiddleware
# from starlette.requests import Request
# from starlette.responses import JSONResponse
# from main import main
#
# app = FastAPI()
#
# # Define the request model
# class CreateRequest(BaseModel):
# test_scenario: str
# executor_description: str
#
# # Define the response model
# class CreateResponse(BaseModel):
# result: Dict[str, str]
# success: bool
# message: Optional[str]
#
# @app.post("/create", response_model=CreateResponse)
# def create_endpoint(request: CreateRequest):
#
# result = main(
# executor_description=request.executor_description,
# test_scenario=request.test_scenario,
# )
# return CreateResponse(result=result, success=True, message=None)
#
#
# app.add_middleware(
# CORSMiddleware,
# allow_origins=["*"],
# allow_credentials=True,
# allow_methods=["*"],
# allow_headers=["*"],
# )
#
# # Add a custom exception handler for RequestValidationError
# @app.exception_handler(RequestValidationError)
# def validation_exception_handler(request: Request, exc: RequestValidationError):
# return JSONResponse(
# status_code=422,
# content={"detail": exc.errors()},
# )
#
#
# if __name__ == "__main__":
# import uvicorn
# uvicorn.run("server:app", host="0.0.0.0", port=8000, log_level="info")

View File

@@ -1,17 +1,37 @@
import os
import shutil
import concurrent.futures
import concurrent.futures
from typing import Generator
import sys
from contextlib import contextmanager
import docker
from docker import APIClient
def get_microservice_path(path, microservice_name, package, num_approach, version):
    """Build ``<path>/<name>/<approach>_<pkgs>/v<version>`` for one generated attempt."""
    joined_packages = '_'.join(package)
    approach_folder = f'{num_approach}_{joined_packages}'
    return os.path.join(path, microservice_name, approach_folder, f'v{version}')
def persist_file(file_content, file_path):
    """Write *file_content* to *file_path*, replacing any existing content."""
    with open(file_path, 'w') as file_handle:
        file_handle.write(file_content)
def get_all_microservice_files_with_content(folder_path):
    """Map each regular file directly inside *folder_path* to its text content.

    Sub-directories are skipped; files are read as UTF-8.
    """
    contents = {}
    for entry in os.listdir(folder_path):
        entry_path = os.path.join(folder_path, entry)
        if not os.path.isfile(entry_path):
            continue
        with open(entry_path, 'r', encoding='utf-8') as handle:
            contents[entry] = handle.read()
    return contents
class GenerationTimeoutError(Exception):
pass
@@ -42,4 +62,19 @@ def suppress_stdout():
yield
finally:
sys.stdout.close()
sys.stdout = original_stdout
sys.stdout = original_stdout
def is_docker_running():
    # Best-effort probe: True iff a Docker daemon is reachable from this host.
    try:
        # hubble exposes a platform flag; imported lazily so a missing/broken
        # hubble install just yields False instead of crashing at import time.
        from hubble import __windows__
        _client = docker.from_env()
        # low-level client
        _raw_client = APIClient(
            base_url=docker.constants.DEFAULT_NPIPE
            if __windows__
            else docker.constants.DEFAULT_UNIX_SOCKET
        )
    except Exception:
        # Any failure (docker not installed, daemon down, socket missing)
        # means "not running" — callers fall back to non-docker execution.
        return False
    return True