feat: new structure

This commit is contained in:
Florian Hönicke
2023-04-14 16:07:05 +02:00
parent e5a9336619
commit a65b5ff9df
22 changed files with 436 additions and 247 deletions

0
src/apis/__init__.py Normal file
View File

View File

@@ -7,13 +7,13 @@ from openai.error import RateLimitError, Timeout
from src.constants import PRICING_GPT4_PROMPT, PRICING_GPT4_GENERATION, PRICING_GPT3_5_TURBO_PROMPT, \
PRICING_GPT3_5_TURBO_GENERATION
from src.prompt_system import system_base_definition
from src.options.generate.prompt_system import system_base_definition
from src.utils.io import timeout_generator_wrapper, GenerationTimeoutError
from src.utils.string_tools import print_colored
class GPTSession:
def __init__(self):
self.get_openai_api_key()
self.configure_openai_api_key()
if self.is_gpt4_available():
self.supported_model = 'gpt-4'
self.pricing_prompt = PRICING_GPT4_PROMPT
@@ -26,7 +26,7 @@ class GPTSession:
self.chars_prompt_so_far = 0
self.chars_generation_so_far = 0
def get_openai_api_key(self):
def configure_openai_api_key(self):
if 'OPENAI_API_KEY' not in os.environ:
raise Exception('''
You need to set OPENAI_API_KEY in your environment.

View File

@@ -3,17 +3,41 @@ import json
import os
import re
import subprocess
import threading
import webbrowser
from pathlib import Path
import click
import hubble
from hubble.executor.helper import upload_file, archive_package, get_request_header
from jcloud.flow import CloudFlow
from jina import Flow
from src.utils.io import suppress_stdout
from src.utils.io import suppress_stdout, is_docker_running
from src.utils.string_tools import print_colored
import requests
import time
def wait_until_app_is_ready(url):
    """Block until an HTTP GET against *url* answers with status 200.

    Polls every 0.5 seconds; connection errors are treated as
    "not ready yet" and silently retried.
    """
    ready = False
    while not ready:
        try:
            response = requests.get(url)
            print('waiting for app to be ready...')
            ready = response.status_code == 200
        except requests.exceptions.RequestException:
            # app not reachable yet - keep polling
            pass
        time.sleep(0.5)
def open_streamlit_app():
    """Wait for the local playground to come up, then show it in the browser."""
    playground_url = "http://localhost:8081/playground"
    wait_until_app_is_ready(playground_url)
    # new=2: open in a new browser tab where possible
    webbrowser.open(playground_url, new=2)
def redirect_callback(href):
print(
f'You need login to Jina first to use GPTDeploy\n'
@@ -85,29 +109,89 @@ def get_user_name():
return response['data']['name']
def deploy_on_jcloud(flow_yaml):
def _deploy_on_jcloud(flow_yaml):
cloud_flow = CloudFlow(path=flow_yaml)
return cloud_flow.__enter__().endpoints['gateway']
def deploy_flow(executor_name, dest_folder):
def deploy_on_jcloud(executor_name, microservice_path):
    """Deploy the generated microservice as a docker-based flow on Jina Cloud.

    Tries up to three times, waiting 5 seconds between attempts.

    :param executor_name: name of the executor referenced in the flow yaml
    :param microservice_path: folder of the generated microservice version
    :return: gateway host of the deployed flow
    :raises Exception: when all three deployment attempts fail
    """
    print('Deploy a jina flow')
    full_flow_path = create_flow_yaml(microservice_path, executor_name, use_docker=True)
    for i in range(3):
        try:
            host = _deploy_on_jcloud(flow_yaml=full_flow_path)
            break
        except Exception as e:
            if i == 2:
                # Last attempt failed: give up immediately instead of printing
                # "Trying again" and sleeping 5s first (the original did both).
                # Chain the cause so the underlying error stays visible.
                raise Exception('''
Could not deploy on Jina Cloud.
This can happen when the microservice is buggy, if it requires too much memory or if the Jina Cloud is overloaded.
Please try again later.
'''
                                ) from e
            print(f'Could not deploy on Jina Cloud. Trying again in 5 seconds. Error: {e}')
            time.sleep(5)
    print(f'''
Your Microservice is deployed.
Run the following command to start the playground:
streamlit run {os.path.join(microservice_path, "app.py")} --server.port 8081 --server.address 0.0.0.0 -- --host http://{host}
'''
          )
    return host
def run_streamlit_app(app_path):
    """Launch the generated streamlit playground as a blocking subprocess.

    The playground binds to 0.0.0.0:8081 and talks to the locally running
    flow on grpc://localhost:8080.
    """
    # Fix: the original passed 'server.address' without the leading '--',
    # so streamlit treated it (and '0.0.0.0') as positional arguments
    # instead of a config option.
    subprocess.run([
        'streamlit', 'run', app_path,
        '--server.address', '0.0.0.0',
        '--server.port', '8081',
        '--', '--host', 'grpc://localhost:8080',
    ])
def run_locally(executor_name, microservice_version_path):
    """Start the generated microservice as a local Jina flow plus its playground.

    Falls back to a docker-less flow when no docker daemon is reachable.
    """
    docker_available = is_docker_running()
    if not docker_available:
        click.echo('Docker daemon doesn\'t seem to be running. Trying to start it without docker')
    print('Run a jina flow locally')
    full_flow_path = create_flow_yaml(microservice_version_path, executor_name, docker_available)
    flow = Flow.load_config(full_flow_path)
    with flow:
        print(f'''
Your microservice started locally.
We now start the playground for you.
''')
        app_path = os.path.join(microservice_version_path, "app.py")
        # streamlit blocks, so it gets its own thread ...
        streamlit_thread = threading.Thread(target=run_streamlit_app, args=(app_path,))
        streamlit_thread.start()
        # ... while the main thread opens the browser and keeps the flow alive
        open_streamlit_app()
        flow.block()
def create_flow_yaml(dest_folder, executor_name, use_docker):
if use_docker:
prefix = 'jinaai+docker'
else:
prefix = 'jinaai'
flow = f'''
jtype: Flow
with:
name: nowapi
env:
JINA_LOG_LEVEL: DEBUG
port: 8080
jcloud:
version: 3.14.2.dev18
labels:
creator: microchain
name: gptdeploy
executors:
- name: {executor_name.lower()}
uses: jinaai+docker://{get_user_name()}/{executor_name}:latest
env:
JINA_LOG_LEVEL: DEBUG
uses: {prefix }://{get_user_name()}/{executor_name}:latest
{"" if use_docker else "install-requirements: True"}
jcloud:
resources:
instance: C2
@@ -117,16 +201,7 @@ executors:
'flow.yml')
with open(full_flow_path, 'w') as f:
f.write(flow)
for i in range(3):
try:
host = deploy_on_jcloud(flow_yaml=full_flow_path)
break
except Exception as e:
raise e
print(f'Flow is deployed create the playground for {host}')
return host
return full_flow_path
def replace_client_line(file_content: str, replacement: str) -> str:

View File

@@ -1,30 +1,80 @@
import functools
import os
import click
from src.executor_factory import ExecutorFactory
from src.jina_cloud import jina_auth_login
from src.key_handling import set_api_key
from src.apis.jina_cloud import jina_auth_login
from src.options.configure.key_handling import set_api_key
def exception_interceptor(func):
    """Decorator re-raising any exception from *func* with an apology that asks
    the user to file a github issue; keeps the original exception type and
    chains the original exception as the cause.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as error:
            message = f'''
{str(error)}
😱😱😱 Sorry for this experience. Could you please report an issue about this on our github repo? We'll try to fix it asap.
'''
            raise type(error)(message) from error
    return wrapper
def path_param(func):
    """Decorator adding a required --path click option whose value is expanded
    (~ resolved, made absolute) before *func* runs."""
    @click.option('--path', required=True, help='Path to the generated microservice.')
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs['path'] = os.path.abspath(os.path.expanduser(kwargs['path']))
        return func(*args, **kwargs)
    return wrapper
@click.group(invoke_without_command=True)
def main():
@click.pass_context
@exception_interceptor
def main(ctx):
if ctx.invoked_subcommand is None:
click.echo(ctx.get_help())
jina_auth_login()
@main.command()
@click.option('--description', required=True, help='Description of the executor.')
@click.option('--test', required=True, help='Test scenario for the executor.')
@click.option('--num_approaches', default=3, type=int,
help='Number of num_approaches to use to fulfill the task (default: 3).')
@click.option('--output_path', default='executor', help='Path to the output folder (must be empty). ')
def create(
@click.option('--description', required=True, help='Description of the microservice.')
@click.option('--test', required=True, help='Test scenario for the microservice.')
@path_param
def generate(
description,
test,
num_approaches=3,
output_path='executor',
path,
):
executor_factory = ExecutorFactory()
executor_factory.create(description, num_approaches, output_path, test)
from src.options.generate.generator import Generator
path = os.path.expanduser(path)
path = os.path.abspath(path)
if os.path.exists(path):
if os.listdir(path):
click.echo(f"Error: The path {path} you provided via --path is not empty. Please choose a directory that does not exist or is empty.")
return
generator = Generator()
generator.generate(description, test, path)
@main.command()
@path_param
def run(path):
    """Run the generated microservice locally (`gptdeploy run --path ...`)."""
    # lazy import keeps CLI startup fast
    from src.options.run import Runner
    # path is already expanded and absolutized by the path_param decorator;
    # the previous in-body expanduser/abspath calls were redundant
    Runner().run(path)
@main.command()
@path_param
def deploy(path):
    """Deploy the generated microservice on Jina Cloud (`gptdeploy deploy --path ...`)."""
    # lazy import keeps CLI startup fast
    from src.options.deploy.deployer import Deployer
    # path is already expanded and absolutized by the path_param decorator;
    # the previous in-body expanduser/abspath calls were redundant
    Deployer().deploy(path)
@main.command()
@click.option('--key', required=True, help='Your OpenAI API key.')

View File

@@ -1,5 +1,5 @@
EXECUTOR_FILE_NAME = 'executor.py'
TEST_EXECUTOR_FILE_NAME = 'test_executor.py'
EXECUTOR_FILE_NAME = 'microservice.py'
TEST_EXECUTOR_FILE_NAME = 'test_microservice.py'
REQUIREMENTS_FILE_NAME = 'requirements.txt'
DOCKER_FILE_NAME = 'Dockerfile'
CLIENT_FILE_NAME = 'client.py'
@@ -21,12 +21,12 @@ FILE_AND_TAG_PAIRS = [
(STREAMLIT_FILE_NAME, STREAMLIT_FILE_TAG)
]
EXECUTOR_FOLDER_v1 = 'executor_v1'
EXECUTOR_FOLDER_v2 = 'executor_v2'
FLOW_URL_PLACEHOLDER = 'jcloud.jina.ai'
PRICING_GPT4_PROMPT = 0.03
PRICING_GPT4_GENERATION = 0.06
PRICING_GPT3_5_TURBO_PROMPT = 0.002
PRICING_GPT3_5_TURBO_GENERATION = 0.002
PRICING_GPT3_5_TURBO_GENERATION = 0.002
NUM_IMPLEMENTATION_STRATEGIES = 3
MAX_DEBUGGING_ITERATIONS = 10

37
src/options/__init__.py Normal file
View File

@@ -0,0 +1,37 @@
import os
def get_latest_folder(path):
    """Return the sub-folder of *path* that sorts last, comparing digit runs
    numerically (natural sort) so that e.g. 'v10' beats 'v9'.

    The previous plain lexicographic max() picked 'v9' over 'v10', which
    silently selects the wrong version once a microservice reaches its
    tenth debugging iteration.
    """
    import re

    def _natural_key(folder):
        # split the folder NAME into alternating text/number chunks;
        # numbers compare as ints, text compares as before
        name = os.path.basename(folder)
        return [int(chunk) if chunk.isdigit() else chunk
                for chunk in re.split(r'(\d+)', name)]

    sub_folders = [os.path.join(path, f) for f in os.listdir(path)
                   if os.path.isdir(os.path.join(path, f))]
    return max(sub_folders, key=_natural_key)
def get_latest_version_path(microservice_path):
    """Descend the three generated levels (microservice name -> approach ->
    version) and return the newest version folder."""
    path = microservice_path
    for _ in range(3):  # name folder, approach folder, version folder
        path = get_latest_folder(path)
    return path
def get_executor_name(microservice_path):
    """Return the executor name, i.e. the name of the (single) folder inside
    *microservice_path*.

    Uses os.path.basename instead of the previous .split('/')[-1], which
    returned the whole path unchanged on Windows where the separator is '\\'.
    """
    return os.path.basename(get_latest_folder(microservice_path))
def validate_folder_is_correct(microservice_path):
    """Sanity-check a --path folder before running or deploying.

    Ensures the path exists, is a directory, contains exactly one
    microservice folder, and that the latest generated version contains
    every file a microservice needs.

    :raises ValueError: on the first failed check
    """
    if not os.path.exists(microservice_path):
        raise ValueError(f'Path {microservice_path} does not exist')
    if not os.path.isdir(microservice_path):
        raise ValueError(f'Path {microservice_path} is not a directory')
    entries = os.listdir(microservice_path)
    if len(entries) == 0:
        raise ValueError(f'Path {microservice_path} is empty. Please generate a microservice first. Type `gptdeploy generate` for further instructions.')
    if len(entries) > 1:
        raise ValueError(f'Path {microservice_path} needs to contain only one folder. Please make sure that you only have one microservice in this folder.')
    latest_version_path = get_latest_version_path(microservice_path)
    for file_name in ('app.py', 'requirements.txt', 'Dockerfile', 'config.yml',
                      'microservice.py', 'test_microservice.py'):
        if not os.path.exists(os.path.join(latest_version_path, file_name)):
            raise ValueError(f'Path {latest_version_path} needs to contain a file named {file_name}')

View File

View File

View File

@@ -0,0 +1,10 @@
from src.apis.jina_cloud import deploy_on_jcloud
from src.options import validate_folder_is_correct, get_executor_name, get_latest_version_path
class Deployer:
    """Thin orchestration layer behind `gptdeploy deploy`."""

    def deploy(self, microservice_path):
        """Validate the generated folder and deploy its newest version on JCloud."""
        validate_folder_is_correct(microservice_path)
        deploy_on_jcloud(
            executor_name=get_executor_name(microservice_path),
            microservice_path=get_latest_version_path(microservice_path),
        )

View File

View File

@@ -2,16 +2,16 @@ import os
import random
import re
from src import gpt, jina_cloud
from src.constants import FILE_AND_TAG_PAIRS
from src.jina_cloud import push_executor, process_error_message
from src.prompt_tasks import general_guidelines, executor_file_task, chain_of_thought_creation, test_executor_file_task, \
chain_of_thought_optimization, requirements_file_task, docker_file_task, not_allowed
from src.utils.io import persist_file
from src.apis import gpt
from src.constants import FILE_AND_TAG_PAIRS, NUM_IMPLEMENTATION_STRATEGIES, MAX_DEBUGGING_ITERATIONS
from src.apis.jina_cloud import process_error_message, push_executor
from src.options.generate.prompt_tasks import general_guidelines, chain_of_thought_creation, executor_file_task, \
not_allowed, chain_of_thought_optimization, test_executor_file_task, requirements_file_task, docker_file_task
from src.utils.io import persist_file, get_all_microservice_files_with_content, get_microservice_path
from src.utils.string_tools import print_colored
class ExecutorFactory:
class Generator:
def __init__(self):
self.gpt_session = gpt.GPTSession()
@@ -23,93 +23,81 @@ class ExecutorFactory:
else:
return ''
def write_config_yml(self, executor_name, dest_folder):
def write_config_yml(self, microservice_name, dest_folder):
config_content = f'''
jtype: {executor_name}
jtype: {microservice_name}
py_modules:
- executor.py
- microservice.py
metas:
name: {executor_name}
name: {microservice_name}
'''
with open(os.path.join(dest_folder, 'config.yml'), 'w') as f:
f.write(config_content)
def get_all_executor_files_with_content(self, folder_path):
file_name_to_content = {}
for filename in os.listdir(folder_path):
file_path = os.path.join(folder_path, filename)
if os.path.isfile(file_path):
with open(file_path, 'r', encoding='utf-8') as file:
content = file.read()
file_name_to_content[filename] = content
return file_name_to_content
def files_to_string(self, file_name_to_content):
all_executor_files_string = ''
all_microservice_files_string = ''
for file_name, tag in FILE_AND_TAG_PAIRS:
if file_name in file_name_to_content:
all_executor_files_string += f'**{file_name}**\n'
all_executor_files_string += f'```{tag}\n'
all_executor_files_string += file_name_to_content[file_name]
all_executor_files_string += '\n```\n\n'
return all_executor_files_string
all_microservice_files_string += f'**{file_name}**\n'
all_microservice_files_string += f'```{tag}\n'
all_microservice_files_string += file_name_to_content[file_name]
all_microservice_files_string += '\n```\n\n'
return all_microservice_files_string
def wrap_content_in_code_block(self, executor_content, file_name, tag):
return f'**{file_name}**\n```{tag}\n{executor_content}\n```\n\n'
def wrap_content_in_code_block(self, microservice_content, file_name, tag):
return f'**{file_name}**\n```{tag}\n{microservice_content}\n```\n\n'
def create_executor(
def generate_microservice(
self,
description,
test,
output_path,
executor_name,
path,
microservice_name,
package,
num_approach,
is_chain_of_thought=False,
):
EXECUTOR_FOLDER_v1 = self.get_executor_path(output_path, executor_name, package, num_approach, 1)
os.makedirs(EXECUTOR_FOLDER_v1)
MICROSERVICE_FOLDER_v1 = get_microservice_path(path, microservice_name, package, num_approach, 1)
os.makedirs(MICROSERVICE_FOLDER_v1)
print_colored('', '############# Executor #############', 'red')
print_colored('', '############# Microservice #############', 'red')
user_query = (
general_guidelines()
+ executor_file_task(executor_name, description, test, package)
+ executor_file_task(microservice_name, description, test, package)
+ chain_of_thought_creation()
)
conversation = self.gpt_session.get_conversation()
executor_content_raw = conversation.query(user_query)
microservice_content_raw = conversation.query(user_query)
if is_chain_of_thought:
executor_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('python', 'executor.py'))
executor_content = self.extract_content_from_result(executor_content_raw, 'executor.py')
microservice_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('python', 'microservice.py'))
microservice_content = self.extract_content_from_result(microservice_content_raw, 'microservice.py')
persist_file(executor_content, os.path.join(EXECUTOR_FOLDER_v1, 'executor.py'))
persist_file(microservice_content, os.path.join(MICROSERVICE_FOLDER_v1, 'microservice.py'))
print_colored('', '############# Test Executor #############', 'red')
print_colored('', '############# Test Microservice #############', 'red')
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(executor_content, 'executor.py', 'python')
+ test_executor_file_task(executor_name, test)
+ self.wrap_content_in_code_block(microservice_content, 'microservice.py', 'python')
+ test_executor_file_task(microservice_name, test)
)
conversation = self.gpt_session.get_conversation()
test_executor_content_raw = conversation.query(user_query)
test_microservice_content_raw = conversation.query(user_query)
if is_chain_of_thought:
test_executor_content_raw = conversation.query(
test_microservice_content_raw = conversation.query(
f"General rules: " + not_allowed() +
chain_of_thought_optimization('python', 'test_executor.py')
chain_of_thought_optimization('python', 'test_microservice.py')
+ "Don't add any additional tests. "
)
test_executor_content = self.extract_content_from_result(test_executor_content_raw, 'test_executor.py')
persist_file(test_executor_content, os.path.join(EXECUTOR_FOLDER_v1, 'test_executor.py'))
test_microservice_content = self.extract_content_from_result(test_microservice_content_raw, 'test_microservice.py')
persist_file(test_microservice_content, os.path.join(MICROSERVICE_FOLDER_v1, 'test_microservice.py'))
print_colored('', '############# Requirements #############', 'red')
requirements_path = os.path.join(EXECUTOR_FOLDER_v1, 'requirements.txt')
requirements_path = os.path.join(MICROSERVICE_FOLDER_v1, 'requirements.txt')
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(executor_content, 'executor.py', 'python')
+ self.wrap_content_in_code_block(test_executor_content, 'test_executor.py', 'python')
+ self.wrap_content_in_code_block(microservice_content, 'microservice.py', 'python')
+ self.wrap_content_in_code_block(test_microservice_content, 'test_microservice.py', 'python')
+ requirements_file_task()
)
conversation = self.gpt_session.get_conversation()
@@ -124,8 +112,8 @@ class ExecutorFactory:
print_colored('', '############# Dockerfile #############', 'red')
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(executor_content, 'executor.py', 'python')
+ self.wrap_content_in_code_block(test_executor_content, 'test_executor.py', 'python')
+ self.wrap_content_in_code_block(microservice_content, 'microservice.py', 'python')
+ self.wrap_content_in_code_block(test_microservice_content, 'test_microservice.py', 'python')
+ self.wrap_content_in_code_block(requirements_content, 'requirements.txt', '')
+ docker_file_task()
)
@@ -135,28 +123,29 @@ class ExecutorFactory:
dockerfile_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('dockerfile', 'Dockerfile'))
dockerfile_content = self.extract_content_from_result(dockerfile_content_raw, 'Dockerfile')
persist_file(dockerfile_content, os.path.join(EXECUTOR_FOLDER_v1, 'Dockerfile'))
persist_file(dockerfile_content, os.path.join(MICROSERVICE_FOLDER_v1, 'Dockerfile'))
self.write_config_yml(executor_name, EXECUTOR_FOLDER_v1)
print('First version of the executor created. Start iterating on it to make the tests pass...')
self.write_config_yml(microservice_name, MICROSERVICE_FOLDER_v1)
print('First version of the microservice generated. Start iterating on it to make the tests pass...')
def create_playground(self, executor_name, executor_path, host):
def generate_playground(self, microservice_name, microservice_path):
print_colored('', '############# Playground #############', 'red')
file_name_to_content = self.get_all_executor_files_with_content(executor_path)
file_name_to_content = get_all_microservice_files_with_content(microservice_path)
user_query = (
general_guidelines()
+ self.wrap_content_in_code_block(file_name_to_content['executor.py'], 'executor.py', 'python')
+ self.wrap_content_in_code_block(file_name_to_content['test_executor.py'], 'test_executor.py',
+ self.wrap_content_in_code_block(file_name_to_content['microservice.py'], 'microservice.py', 'python')
+ self.wrap_content_in_code_block(file_name_to_content['test_microservice.py'], 'test_microservice.py',
'python')
+ f'''
Create a playground for the executor {executor_name} using streamlit.
Create a playground for the executor {microservice_name} using streamlit.
The playground must look like it was made by a professional designer.
All the ui elements are well thought out to make them visually appealing and easy to use.
The executor is hosted on {host}.
The playground must be started with a custom host: streamlit run app.py -- --host grpc://...
The playground must not let the user configure the --host grpc://... on the ui.
This is an example how you can connect to the executor assuming the document (d) is already defined:
from jina import Client, Document, DocumentArray
client = Client(host='{host}')
client = Client(host=host)
response = client.post('/', inputs=DocumentArray([d])) # always use '/'
print(response[0].text) # can also be blob in case of image/audio..., this should be visualized in the streamlit app
'''
@@ -166,25 +155,21 @@ print(response[0].text) # can also be blob in case of image/audio..., this shoul
playground_content_raw = conversation.query(
f"General rules: " + not_allowed() + chain_of_thought_optimization('python', 'app.py'))
playground_content = self.extract_content_from_result(playground_content_raw, 'app.py')
persist_file(playground_content, os.path.join(executor_path, 'app.py'))
persist_file(playground_content, os.path.join(microservice_path, 'app.py'))
def get_executor_path(self, output_path, executor_name, package, num_approach, version):
package_path = '_'.join(package)
return os.path.join(output_path, executor_name, f'{num_approach}_{package_path}', f'v{version}')
def debug_executor(self, output_path, executor_name, num_approach, packages, description, test):
MAX_DEBUGGING_ITERATIONS = 10
def debug_microservice(self, path, microservice_name, num_approach, packages, description, test):
error_before = ''
for i in range(1, MAX_DEBUGGING_ITERATIONS):
print('Debugging iteration', i)
print('Trying to build the microservice. Might take a while...')
previous_executor_path = self.get_executor_path(output_path, executor_name, packages, num_approach, i)
next_executor_path = self.get_executor_path(output_path, executor_name, packages, num_approach, i + 1)
log_hubble = push_executor(previous_executor_path)
previous_microservice_path = get_microservice_path(path, microservice_name, packages, num_approach, i)
next_microservice_path = get_microservice_path(path, microservice_name, packages, num_approach, i + 1)
log_hubble = push_executor(previous_microservice_path)
error = process_error_message(log_hubble)
if error:
os.makedirs(next_executor_path)
file_name_to_content = self.get_all_executor_files_with_content(previous_executor_path)
os.makedirs(next_microservice_path)
file_name_to_content = get_all_microservice_files_with_content(previous_microservice_path)
all_files_string = self.files_to_string(file_name_to_content)
user_query = (
f"General rules: " + not_allowed()
@@ -217,19 +202,19 @@ print(response[0].text) # can also be blob in case of image/audio..., this shoul
file_name_to_content[file_name] = updated_file
for file_name, content in file_name_to_content.items():
persist_file(content, os.path.join(next_executor_path, file_name))
persist_file(content, os.path.join(next_microservice_path, file_name))
error_before = error
else:
break
if i == MAX_DEBUGGING_ITERATIONS - 1:
raise self.MaxDebugTimeReachedException('Could not debug the executor.')
return self.get_executor_path(output_path, executor_name, packages, num_approach, i)
raise self.MaxDebugTimeReachedException('Could not debug the microservice.')
return get_microservice_path(path, microservice_name, packages, num_approach, i)
class MaxDebugTimeReachedException(BaseException):
pass
def generate_executor_name(self, description):
def generate_microservice_name(self, description):
conversation = self.gpt_session.get_conversation()
user_query = f'''
Generate a name for the executor matching the description:
@@ -250,7 +235,7 @@ PDFParserExecutor
name = self.extract_content_from_result(name_raw, 'name.txt')
return name
def get_possible_packages(self, description, threads):
def get_possible_packages(self, description):
print_colored('', '############# What package to use? #############', 'red')
user_query = f'''
Here is the task description of the problme you need to solve:
@@ -279,29 +264,26 @@ package2,package3,...
packages_raw = conversation.query(user_query)
packages_csv_string = self.extract_content_from_result(packages_raw, 'packages.csv')
packages = [package.split(',') for package in packages_csv_string.split('\n')]
packages = packages[:threads]
packages = packages[:NUM_IMPLEMENTATION_STRATEGIES]
return packages
def create(self, description, num_approaches, output_path, test):
generated_name = self.generate_executor_name(description)
executor_name = f'{generated_name}{random.randint(0, 1000_000)}'
packages_list = self.get_possible_packages(description, num_approaches)
def generate(self, description, test, microservice_path):
generated_name = self.generate_microservice_name(description)
microservice_name = f'{generated_name}{random.randint(0, 10_000_000)}'
packages_list = self.get_possible_packages(description)
for num_approach, packages in enumerate(packages_list):
try:
self.create_executor(description, test, output_path, executor_name, packages, num_approach)
executor_path = self.debug_executor(output_path, executor_name, num_approach, packages, description, test)
host = jina_cloud.deploy_flow(executor_name, executor_path)
self.create_playground(executor_name, executor_path, host)
self.generate_microservice(description, test, microservice_path, microservice_name, packages, num_approach)
final_version_path = self.debug_microservice(microservice_path, microservice_name, num_approach, packages, description, test)
self.generate_playground(microservice_name, final_version_path)
except self.MaxDebugTimeReachedException:
print('Could not debug the Executor.')
print('Could not debug the Microservice.')
continue
print(f'''
Executor name: {executor_name}
Executor path: {executor_path}
Host: {host}
Run the following command to start the playground:
streamlit run {os.path.join(executor_path, "app.py")}
You can now run or deploy your microservice:
gptdeploy run --path {microservice_path}
gptdeploy deploy --path {microservice_path}
'''
)
break

View File

@@ -4,9 +4,8 @@ executor_example = '''
Using the Jina framework, users can define executors.
Here is an example of how an executor can be defined. It always starts with a comment:
**executor.py**
**microservice.py**
```python
# this executor binary files as input and returns the length of each binary file as output
from jina import Executor, requests, DocumentArray, Document
import json
class MyInfoExecutor(Executor):

View File

@@ -50,7 +50,7 @@ def test_executor_file_task(executor_name, test_scenario):
if test_scenario else ""
)
+ "Use the following import to import the executor: "
f"from executor import {executor_name} "
f"from microservice import {executor_name} "
+ not_allowed()
+ "The test must not open local files. "
+ "The test must not mock a function of the executor. "

View File

@@ -0,0 +1 @@
from src.options.run.runner import Runner

11
src/options/run/runner.py Normal file
View File

@@ -0,0 +1,11 @@
from src.apis.jina_cloud import run_locally
from src.options import validate_folder_is_correct, get_executor_name, get_latest_version_path
class Runner:
    """Thin orchestration layer behind `gptdeploy run`."""

    def run(self, microservice_path):
        """Validate the generated folder and run its newest version locally."""
        validate_folder_is_correct(microservice_path)
        executor = get_executor_name(microservice_path)
        latest_version = get_latest_version_path(microservice_path)
        run_locally(executor, latest_version)

View File

@@ -1,53 +0,0 @@
# from fastapi import FastAPI
# from fastapi.exceptions import RequestValidationError
# from pydantic import BaseModel
# from typing import Optional, Dict
#
# from starlette.middleware.cors import CORSMiddleware
# from starlette.requests import Request
# from starlette.responses import JSONResponse
# from main import main
#
# app = FastAPI()
#
# # Define the request model
# class CreateRequest(BaseModel):
# test_scenario: str
# executor_description: str
#
# # Define the response model
# class CreateResponse(BaseModel):
# result: Dict[str, str]
# success: bool
# message: Optional[str]
#
# @app.post("/create", response_model=CreateResponse)
# def create_endpoint(request: CreateRequest):
#
# result = main(
# executor_description=request.executor_description,
# test_scenario=request.test_scenario,
# )
# return CreateResponse(result=result, success=True, message=None)
#
#
# app.add_middleware(
# CORSMiddleware,
# allow_origins=["*"],
# allow_credentials=True,
# allow_methods=["*"],
# allow_headers=["*"],
# )
#
# # Add a custom exception handler for RequestValidationError
# @app.exception_handler(RequestValidationError)
# def validation_exception_handler(request: Request, exc: RequestValidationError):
# return JSONResponse(
# status_code=422,
# content={"detail": exc.errors()},
# )
#
#
# if __name__ == "__main__":
# import uvicorn
# uvicorn.run("server:app", host="0.0.0.0", port=8000, log_level="info")

View File

@@ -1,17 +1,37 @@
import os
import shutil
import concurrent.futures
import concurrent.futures
from typing import Generator
import sys
from contextlib import contextmanager
import docker
from docker import APIClient
def get_microservice_path(path, microservice_name, package, num_approach, version):
    """Build the canonical folder for one generated microservice version:
    <path>/<name>/<approach-index>_<packages-joined-by-underscores>/v<version>."""
    approach_folder = f'{num_approach}_{"_".join(package)}'
    return os.path.join(path, microservice_name, approach_folder, f'v{version}')
def persist_file(file_content, file_path):
    """Write *file_content* to *file_path*, overwriting any existing file.

    Writes UTF-8 explicitly so the bytes round-trip with
    get_all_microservice_files_with_content, which reads files back with
    encoding='utf-8'. The previous locale-default encoding could fail on
    non-ASCII content (e.g. emojis) on platforms with a non-UTF-8 default.
    """
    with open(file_path, 'w', encoding='utf-8') as f:
        f.write(file_content)
def get_all_microservice_files_with_content(folder_path):
    """Read every regular file directly inside *folder_path* (UTF-8).

    Sub-directories are skipped; no recursion.

    :return: mapping of file name -> file content
    """
    contents = {}
    for entry in os.listdir(folder_path):
        full_path = os.path.join(folder_path, entry)
        if not os.path.isfile(full_path):
            continue
        with open(full_path, 'r', encoding='utf-8') as handle:
            contents[entry] = handle.read()
    return contents
class GenerationTimeoutError(Exception):
    """Raised when a generation runs longer than allowed.

    NOTE(review): inferred from the name and its export alongside
    timeout_generator_wrapper - confirm against the wrapper's implementation.
    """
    pass
@@ -42,4 +62,19 @@ def suppress_stdout():
yield
finally:
sys.stdout.close()
sys.stdout = original_stdout
sys.stdout = original_stdout
def is_docker_running():
    """Best-effort probe for a reachable local docker daemon.

    Returns True when both the high-level and the low-level docker clients
    can be constructed, False on any failure (docker not installed, daemon
    not running, wrong socket, ...). The broad except is deliberate: this
    is a capability check, not an operation that should surface errors.
    """
    try:
        from hubble import __windows__
        docker.from_env()
        # low-level client; the default endpoint differs per platform
        socket_url = (docker.constants.DEFAULT_NPIPE
                      if __windows__
                      else docker.constants.DEFAULT_UNIX_SOCKET)
        APIClient(base_url=socket_url)
    except Exception:
        return False
    return True