🌈 docs: example rainbow tweet

Florian Hönicke
2023-05-15 13:50:00 +02:00
parent 53ae1d6f4c
commit e9f8ff41ca
46 changed files with 1041 additions and 0 deletions

View File: content.js

@@ -0,0 +1,77 @@
console.log('Twitter Rewrite: Content script loaded');

let openai_api_key = '';

// Get OPENAI_API_KEY from chrome storage
chrome.storage.sync.get({
    openai_api_key: ''
}, function(items) {
    openai_api_key = items.openai_api_key;
});

let observer = new MutationObserver((mutations) => {
    console.log('Twitter Rewrite: DOM mutation detected');
    // For each mutation
    mutations.forEach((mutation) => {
        // If nodes were added
        if (mutation.addedNodes) {
            mutation.addedNodes.forEach((node) => {
                // Only element nodes can be queried for descendants
                if (node.nodeType !== Node.ELEMENT_NODE) {
                    return;
                }
                // If the added node (or its descendants) contains a tweet
                let tweets = node.querySelectorAll('[data-testid="tweetText"]');
                tweets.forEach((tweet) => {
                    // If the tweet doesn't already have a modify button
                    if (!tweet.querySelector('.modify-button')) {
                        // Capture the tweet text now, before the button below is injected into it
                        const originalTweet = tweet.innerText;
                        // Create new button
                        let button = document.createElement('button');
                        if (openai_api_key === '') {
                            button.innerText = 'Set OPENAI_API_KEY by clicking the extension icon';
                            button.disabled = true;
                        } else {
                            button.innerText = '🦄';
                            button.disabled = false;
                        }
                        button.className = 'modify-button';
                        // Add event listener for button click
                        button.addEventListener('click', function() {
                            // Send tweet to API
                            this.disabled = true;
                            this.innerText = 'Loading...';
                            fetch('https://gptdeploy-61694dd6a3.wolf.jina.ai/post', {
                                method: 'POST',
                                headers: {
                                    'Content-Type': 'application/json',
                                    'accept': 'application/json'
                                },
                                body: JSON.stringify({
                                    "data": [{"text": JSON.stringify({
                                        "tweet": originalTweet,
                                        "OPENAI_API_KEY": openai_api_key
                                    })}]
                                })
                            })
                            .then(response => response.json())
                            .then(data => {
                                let modifiedTweet = JSON.parse(data.data[0].text).positive_tweet;
                                let rainbowTweet = Array.from(modifiedTweet).map((char, i) =>
                                    `<span class="rainbow-text" style="--i: ${i};">${char}</span>`
                                ).join('');
                                // Create a new element node to contain the HTML
                                let newTweet = document.createElement('span');
                                newTweet.innerHTML = rainbowTweet;
                                // Replace the old text node with the new element node
                                tweet.replaceWith(newTweet);
                            })
                            .catch(() => {
                                // Re-enable the button so a failed request can be retried
                                this.innerText = '🦄';
                                this.disabled = false;
                            });
                        });
                        // Inject button into tweet
                        tweet.appendChild(button);
                    }
                });
            });
        }
    });
});

// Start observing the document with the configured parameters
observer.observe(document.body, { childList: true, subtree: true });

Binary file not shown (image, 58 KiB).

Binary file not shown (image, 30 KiB).

View File: manifest.json

@@ -0,0 +1,27 @@
{
    "manifest_version": 3,
    "name": "Rainbow-Tweet",
    "description": "The Rainbow-Tweet plugin allows the user to convert any tweet into positive language by clicking a button on the tweet.",
    "version": "0.0.0.1",
    "icons": {
        "128": "logo.png"
    },
    "action": {
        "default_icon": {
            "128": "logo.png"
        },
        "default_title": "Configure API Key",
        "default_popup": "popup.html"
    },
    "permissions": [
        "storage"
    ],
    "content_scripts": [
        {
            "matches": ["https://twitter.com/*"],
            "js": ["content.js"],
            "css": ["styles.css"],
            "run_at": "document_end"
        }
    ]
}

View File: popup.css

@@ -0,0 +1,37 @@
body {
    font-family: Arial, sans-serif;
}

.container {
    width: 300px;
    padding: 20px;
}

h1 {
    color: #444;
}

.btn {
    color: white;
    background-color: #1da1f2;
    border: none;
    padding: 10px 20px;
    margin-top: 10px;
    cursor: pointer;
}

.footer {
    margin-top: 20px;
    text-align: center;
}

.btn:hover {
    background-color: #0c84d2;
}

.form-group {
    margin-bottom: 15px;
}

.form-text {
    font-size: 0.875em;
    color: #6c757d;
}

View File: popup.html

@@ -0,0 +1,31 @@
<!DOCTYPE html>
<html>
<head>
    <title>Twitter Rewrite: Extension Options</title>
    <link rel="stylesheet" type="text/css" href="popup.css">
</head>
<body>
    <div class="container">
        <h1>Twitter Rewrite: Extension Options</h1>
        <form id="optionForm">
            <div class="form-group">
                <label for="openai_api_key">OpenAI API Key:</label><br>
                <input type="text" id="openai_api_key" name="openai_api_key"><br>
                <small id="apiHelp" class="form-text text-muted">Enter your OpenAI API Key to start using the plugin. If you
                    don't have one, create it <a href="https://platform.openai.com/account/api-keys"
                    target="_blank">here</a>.</small>
            </div>
            <input type="submit" value="Save" class="btn btn-primary">
        </form>
        <p id="status"></p>
        <div class="footer">
            <hr>
            <p>Created by <a href="https://github.com/jina-ai/dev-gpt" target="_blank">Dev-GPT</a></p>
            <img src="jina.png" width="32" height="32">
            <p>For questions or feedback, <a href="https://www.linkedin.com/in/florian-h%C3%B6nicke-b902b6aa/"
                target="_blank">contact the developer</a>.</p>
        </div>
    </div>
    <script src="popup.js"></script>
</body>
</html>

View File: popup.js

@@ -0,0 +1,35 @@
// Saving options to chrome.storage
function save_options() {
    let openai_api_key = document.getElementById('openai_api_key').value;
    chrome.storage.sync.set({
        openai_api_key: openai_api_key
    }, function() {
        // Update status to let the user know options were saved.
        let status = document.getElementById('status');
        status.textContent = 'Options saved.';
        setTimeout(function() {
            status.textContent = '';
        }, 750);
    });
}

// Restores options from chrome.storage
function restore_options() {
    chrome.storage.sync.get({
        openai_api_key: ''
    }, function(items) {
        document.getElementById('openai_api_key').value = items.openai_api_key;
    });
}

document.addEventListener('DOMContentLoaded', restore_options);
document.getElementById('optionForm').addEventListener('submit', function(event) {
    event.preventDefault();
    save_options();
});

View File: styles.css

@@ -0,0 +1,84 @@
.modify-button {
    background-color: #00acee; /* Twitter Blue */
    color: white;
    border: none;
    padding: 5px 10px;
    text-align: center;
    text-decoration: none;
    display: inline-block;
    font-size: 16px;
    margin: 4px 2px;
    cursor: pointer;
    border-radius: 3px;
    box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.5); /* Add some shadow */
}

/*!* Dynamic rainbow colors for each letter *!*/
/*@keyframes rainbow {*/
/*    0% { color: hsl(0, 100%, 50%); }*/
/*    14% { color: hsl(60, 100%, 50%); }*/
/*    28% { color: hsl(120, 100%, 50%); }*/
/*    42% { color: hsl(180, 100%, 50%); }*/
/*    57% { color: hsl(240, 100%, 50%); }*/
/*    71% { color: hsl(300, 100%, 50%); }*/
/*    85% { color: hsl(360, 100%, 50%); }*/
/*    100% { color: hsl(0, 100%, 50%); }*/
/*}*/
/*.rainbow-text {*/
/*    animation: rainbow 7s linear infinite;*/
/*    animation-delay: calc(.07s * var(--i));*/
/*}*/

/* Light mode colors (darker) */
@keyframes rainbow-light {
    0% { color: hsl(0, 100%, 30%); }
    14% { color: hsl(60, 100%, 30%); }
    28% { color: hsl(120, 100%, 30%); }
    42% { color: hsl(180, 100%, 30%); }
    57% { color: hsl(240, 100%, 30%); }
    71% { color: hsl(300, 100%, 30%); }
    85% { color: hsl(360, 100%, 30%); }
    100% { color: hsl(0, 100%, 30%); }
}

/* Dark mode colors (brighter) */
@keyframes rainbow-dark {
    0% { color: hsl(0, 100%, 70%); }
    14% { color: hsl(60, 100%, 70%); }
    28% { color: hsl(120, 100%, 70%); }
    42% { color: hsl(180, 100%, 70%); }
    57% { color: hsl(240, 100%, 70%); }
    71% { color: hsl(300, 100%, 70%); }
    85% { color: hsl(360, 100%, 70%); }
    100% { color: hsl(0, 100%, 70%); }
}

/* Apply light mode colors by default */
.rainbow-text {
    font-size: 200%;
    animation: rainbow-light 7s linear infinite;
    animation-delay: calc(.07s * var(--i));
}

/* Apply dark mode colors if user prefers dark mode */
@media (prefers-color-scheme: dark) {
    .rainbow-text {
        animation: rainbow-dark 7s linear infinite;
        animation-delay: calc(.07s * var(--i));
    }
}
/*!* Rainbow colors for each letter *!*/
/*.rainbow0 { color: red; background-color: cyan; mix-blend-mode: difference; }*/
/*.rainbow1 { color: orange; background-color: blue; mix-blend-mode: difference; }*/
/*.rainbow2 { color: yellow; background-color: purple; mix-blend-mode: difference; }*/
/*.rainbow3 { color: green; background-color: magenta; mix-blend-mode: difference; }*/
/*.rainbow4 { color: blue; background-color: orange; mix-blend-mode: difference; }*/
/*.rainbow5 { color: indigo; background-color: yellow; mix-blend-mode: difference; }*/
/*.rainbow6 { color: violet; background-color: green; mix-blend-mode: difference; }*/

View File

@@ -0,0 +1 @@
curl -X 'POST' 'https://gptdeploy-02e02e4150.wolf.jina.ai/post' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"data": [{"text": "{\"tweet\":\"today is a bad day i dont like it\"}"}]}'
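
For reference, a successful call answers with the same document envelope: the text field of the first returned document carries the JSON string produced by the model, roughly like this (illustrative wording; the model's output varies):

{"data": [{"text": "{\"positive_tweet\": \"Today is a tough day, but I'm staying hopeful!\"}"}]}

This is the shape content.js relies on when it evaluates JSON.parse(data.data[0].text).positive_tweet.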

View File: Dockerfile

@@ -0,0 +1,29 @@
FROM jinaai/dev-gpt:0.0.6
RUN apt-get install --no-install-recommends -y
## install requirements for the executor
COPY requirements.txt .
RUN pip -v install --compile -r requirements.txt
# setup the workspace
COPY . /workdir/
WORKDIR /workdir
RUN pytest test_microservice.py
ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]
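
For local experimentation, the image can be built with a plain docker build (the tag below is arbitrary); note that the build itself runs the test suite (RUN pytest test_microservice.py), which calls the OpenAI API and therefore needs network access:

docker build -t positive-tweet-executor .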

View File: __init__.py

@@ -0,0 +1,15 @@
from jina import Executor, requests as jina_requests, DocumentArray
import json

from .microservice import func


class PositiveTweetModifierExecutor3163055(Executor):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    @jina_requests()
    def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
        for d in docs:
            d.text = func(d.text)
        return docs

View File: apis.py

@@ -0,0 +1,23 @@
import os

import openai

openai.api_key = os.getenv("OPENAI_API_KEY")


class GPT_3_5_Turbo:
    def __init__(self, system: str = ''):
        self.system = system

    def __call__(self, prompt: str) -> str:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{
                "role": 'system',
                "content": self.system
            }, {
                "role": 'user',
                "content": prompt
            }]
        )
        return response.choices[0]['message']['content']
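
For context, microservice.py (below) drives this wrapper roughly like so; the system prompt here is abbreviated and the tweet is illustrative:

gpt_3_5_turbo = GPT_3_5_Turbo(system='Return a JSON object with a "positive_tweet" field.')
print(gpt_3_5_turbo('Input tweet: today is a bad day i dont like it'))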

View File: config.yml

@@ -0,0 +1,5 @@
jtype: PositiveTweetModifierExecutor3163055
py_modules:
  - __init__.py
metas:
  name: PositiveTweetModifierExecutor3163055

View File: microservice.py

@@ -0,0 +1,37 @@
# This microservice receives an API key for OpenAI (OPENAI_API_KEY) and a tweet containing potentially passive-aggressive language as input.
# It analyzes the input tweet using the OpenAI API to identify passive-aggressive language and modifies it to be more positive without changing the meaning.
# The microservice then returns the updated, positive version of the tweet as output.
import json

import openai

from .apis import GPT_3_5_Turbo


def func(input_json: str) -> str:
    # Parse the input JSON string
    input_data = json.loads(input_json)
    # Extract the OpenAI API key from the input data and apply it,
    # so the caller-supplied key is used even if none is set in the environment
    openai.api_key = input_data["OPENAI_API_KEY"]
    tweet = input_data["tweet"]
    # Initialize the GPT-3.5 Turbo API
    gpt_3_5_turbo = GPT_3_5_Turbo(
        system=f'''
You are an AI language model that can modify tweets to make them more positive without changing their meaning.
When you receive a tweet, you will return a JSON object containing the updated, positive version of the tweet.
Example:
Input tweet: "I can't believe you did that. It's so typical of you."
Output JSON: {{"positive_tweet": "I'm surprised you did that. It's just like you!"}}
''')
    # Generate the prompt for the GPT-3.5 Turbo API
    prompt = f"Input tweet: {tweet}"
    # Call the GPT-3.5 Turbo API with the prompt
    generated_string = gpt_3_5_turbo(prompt)
    # Parse the generated JSON string
    output_data = json.loads(generated_string)
    # Return the output JSON string
    return json.dumps(output_data)
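
A minimal sketch of exercising func directly from within the package (placeholder key; the generated wording varies):

import json
from .microservice import func

output = func(json.dumps({
    "OPENAI_API_KEY": "sk-...",
    "tweet": "I can't believe you did that. It's so typical of you."
}))
print(json.loads(output)["positive_tweet"])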

View File: requirements.txt

@@ -0,0 +1,4 @@
jina==3.15.1.dev14
docarray==0.21.0
openai==0.27.5
pytest

View File: test_microservice.py

@@ -0,0 +1,22 @@
# This test case checks if the output of the microservice is of type 'str' for the positive_tweet property.
# Since the output of the GPT-3.5 Turbo model is not deterministic, we cannot check for the exact output.
# Instead, we will test if the output is a valid JSON string and if the 'positive_tweet' property is a string.
import json

from .microservice import func


def test_positive_tweet_type():
    # Define the input JSON string
    input_json = json.dumps({
        "OPENAI_API_KEY": "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",
        "tweet": "I can't believe you did that. It's so typical of you."
    })
    # Call the microservice function with the input JSON string
    output_json = func(input_json)
    # Parse the output JSON string
    output_data = json.loads(output_json)
    # Check if the 'positive_tweet' property is a string
    assert isinstance(output_data["positive_tweet"], str)

View File: Dockerfile

@@ -0,0 +1,29 @@
FROM jinaai/dev-gpt:0.0.6
RUN apt-get install --no-install-recommends -y
## install requirements for the executor
COPY requirements.txt .
RUN pip -v install --compile -r requirements.txt
# setup the workspace
COPY . /workdir/
WORKDIR /workdir
RUN pytest test_microservice.py
ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]

View File: __init__.py

@@ -0,0 +1,15 @@
from jina import Executor, requests as jina_requests, DocumentArray
import json

from .microservice import func


class PositiveTweetModifierExecutor3163055(Executor):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    @jina_requests()
    def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
        for d in docs:
            d.text = func(d.text)
        return docs

View File: apis.py

@@ -0,0 +1,23 @@
import os

import openai

openai.api_key = os.getenv("OPENAI_API_KEY")


class GPT_3_5_Turbo:
    def __init__(self, system: str = ''):
        self.system = system

    def __call__(self, prompt: str) -> str:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{
                "role": 'system',
                "content": self.system
            }, {
                "role": 'user',
                "content": prompt
            }]
        )
        return response.choices[0]['message']['content']

View File: config.yml

@@ -0,0 +1,5 @@
jtype: PositiveTweetModifierExecutor3163055
py_modules:
  - __init__.py
metas:
  name: PositiveTweetModifierExecutor3163055

View File: microservice.py

@@ -0,0 +1,41 @@
# This microservice receives an API key for OpenAI (OPENAI_API_KEY) and a tweet containing potentially passive-aggressive language as input.
# It analyzes the input tweet using the OpenAI API to identify passive-aggressive language and modifies it to be more positive without changing the meaning.
# The microservice then returns the updated, positive version of the tweet as output.
import json

import openai

from .apis import GPT_3_5_Turbo


def func(input_json: str) -> str:
    # Parse the input JSON string
    input_data = json.loads(input_json)
    # Extract the OpenAI API key from the input data and apply it,
    # so the caller-supplied key is used even if none is set in the environment
    openai.api_key = input_data["OPENAI_API_KEY"]
    tweet = input_data["tweet"]
    # Initialize the GPT-3.5 Turbo API
    gpt_3_5_turbo = GPT_3_5_Turbo(
        system=f'''
You are an AI language model that can modify tweets to make them more positive without changing their meaning.
When you receive a tweet, you will return a JSON object containing the updated, positive version of the tweet.
Example:
Input tweet: "I can't believe you did that. It's so typical of you."
Output JSON: {{"positive_tweet": "I'm surprised you did that. It's just like you!"}}
''')
    # Generate the prompt for the GPT-3.5 Turbo API
    prompt = f"Input tweet: {tweet}"
    # Call the GPT-3.5 Turbo API with the prompt
    generated_string = gpt_3_5_turbo(prompt)
    # Check if the generated_string is a valid JSON string
    try:
        output_data = json.loads(generated_string)
    except json.JSONDecodeError:
        # If the generated_string is not a valid JSON string, return an error message
        return json.dumps({"error": "Invalid JSON string generated by the GPT-3.5 Turbo API"})
    # Return the output JSON string
    return json.dumps(output_data)
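
Since this version can also return an error envelope, a caller should branch on it; a sketch (handle_failure is a hypothetical caller-side helper):

result = json.loads(func(input_json))
if "error" in result:
    handle_failure(result["error"])  # hypothetical; e.g. log and skip the tweet
else:
    print(result["positive_tweet"])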

View File: requirements.txt

@@ -0,0 +1,4 @@
jina==3.15.1.dev14
docarray==0.21.0
openai==0.27.5
pytest

View File: test_microservice.py

@@ -0,0 +1,22 @@
# This test case checks if the output of the microservice is of type 'str' for the positive_tweet property.
# Since the output of the GPT-3.5 Turbo model is not deterministic, we cannot check for the exact output.
# Instead, we will test if the output is a valid JSON string and if the 'positive_tweet' property is a string.
import json

from .microservice import func


def test_positive_tweet_type():
    # Define the input JSON string
    input_json = json.dumps({
        "OPENAI_API_KEY": "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",
        "tweet": "I can't believe you did that. It's so typical of you."
    })
    # Call the microservice function with the input JSON string
    output_json = func(input_json)
    # Parse the output JSON string
    output_data = json.loads(output_json)
    # Check if the 'positive_tweet' property is a string
    assert isinstance(output_data["positive_tweet"], str)

View File: Dockerfile

@@ -0,0 +1,29 @@
FROM jinaai/dev-gpt:0.0.6
RUN apt-get install --no-install-recommends -y
## install requirements for the executor
COPY requirements.txt .
RUN pip -v install --compile -r requirements.txt
# setup the workspace
COPY . /workdir/
WORKDIR /workdir
RUN pytest test_microservice.py
ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]

View File: __init__.py

@@ -0,0 +1,15 @@
from jina import Executor, requests as jina_requests, DocumentArray
import json

from .microservice import func


class PositiveTweetModifierExecutor3163055(Executor):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    @jina_requests()
    def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
        for d in docs:
            d.text = func(d.text)
        return docs

View File: apis.py

@@ -0,0 +1,23 @@
import os

import openai


class GPT_3_5_Turbo:
    def __init__(self, system: str = ''):
        self.system = system

    def __call__(self, prompt: str) -> str:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{
                "role": 'system',
                "content": self.system
            }, {
                "role": 'user',
                "content": prompt
            }]
        )
        return response.choices[0]['message']['content']

View File: config.yml

@@ -0,0 +1,5 @@
jtype: PositiveTweetModifierExecutor3163055
py_modules:
  - __init__.py
metas:
  name: PositiveTweetModifierExecutor3163055

View File: flow.yml

@@ -0,0 +1,20 @@
jtype: Flow
with:
  port: 8080
  protocol: http
jcloud:
  version: 3.15.1.dev14
  labels:
    creator: microchain
  name: gptdeploy
gateway:
  uses: jinaai+docker://auth0-unified-448f11965ce142b6/GatewayPositiveTweetModifierExecutor3163055:latest
executors:
  - name: positivetweetmodifierexecutor3163055
    uses: jinaai+docker://auth0-unified-448f11965ce142b6/PositiveTweetModifierExecutor3163055:latest
    jcloud:
      resources:
        instance: C2
        capacity: spot

View File: Dockerfile

@@ -0,0 +1,14 @@
FROM jinaai/jina:3.15.1-dev14-py39-standard
RUN apt-get update && apt-get install --no-install-recommends -y git pip nginx && rm -rf /var/lib/apt/lists/*
## install requirements for the executor
COPY requirements.txt .
RUN pip install --compile -r requirements.txt
# setup the workspace
COPY . /workdir/
WORKDIR /workdir
ENTRYPOINT ["jina", "gateway", "--uses", "config.yml"]

View File: app.py

@@ -0,0 +1,58 @@
import json
import os

import streamlit as st
from jina import Client, Document, DocumentArray

# Set the favicon and title
st.set_page_config(
    page_title="Positive Tweet Modifier",
    page_icon=":smiley:",
    layout="wide",
)

# Define the input dictionary
INPUT_DICTIONARY = {
    "OPENAI_API_KEY": "<your api key>",
    "tweet": "I can't believe you did that. It's so typical of you.",
}


# Define the function to send a request to the microservice
def send_request(input_dict):
    client = Client(host='http://localhost:8080')
    d = Document(text=json.dumps(input_dict))
    response = client.post('/', inputs=DocumentArray([d]))
    return response[0].text


# Create the UI
st.title("Positive Tweet Modifier :speech_balloon:")
st.write("Transform negative tweets into positive ones using GPT-3.5 Turbo! :sunglasses:")

# Input form
st.subheader("Input")
tweet = st.text_area("Enter a negative tweet:", value=INPUT_DICTIONARY["tweet"], height=100)
api_key = st.text_input("Enter your OPENAI_API_KEY:", value=INPUT_DICTIONARY["OPENAI_API_KEY"])

# Send request button
if st.button("Transform Tweet"):
    INPUT_DICTIONARY["tweet"] = tweet
    INPUT_DICTIONARY["OPENAI_API_KEY"] = api_key
    response_text = send_request(INPUT_DICTIONARY)
    response_data = json.loads(response_text)
    # Display the result
    st.subheader("Result")
    st.write(f"Positive Tweet: {response_data['positive_tweet']} :thumbsup:")

# Deploy your own microservice
st.markdown(
    "Want to deploy your own microservice? [Click here!](https://github.com/jina-ai/dev-gpt)"
)

# Display the curl command
deployment_id = os.environ.get("K8S_NAMESPACE_NAME", "")
host = f'https://dev-gpt-{deployment_id.split("-")[1]}.wolf.jina.ai/post' if deployment_id else "http://localhost:8080/post"
with st.expander("See curl command"):
    st.code(
        f'curl -X \'POST\' \'{host}\' -H \'accept: application/json\' -H \'Content-Type: application/json\' -d \'{{"data": [{{"text": "hello, world!"}}]}}\'',
        language='bash'
    )
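
Outside the gateway, the playground can be started directly the same way PlaygroundGateway does it; app_config.toml (below) then serves it under the /playground base path:

streamlit run app.py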

View File: app_config.toml

@@ -0,0 +1,4 @@
[server]
baseUrlPath = "/playground"
headless = true

View File: config.yml

@@ -0,0 +1,5 @@
jtype: GatewayPositiveTweetModifierExecutor3163055
py_modules:
  - custom_gateway.py
metas:
  name: GatewayPositiveTweetModifierExecutor3163055

View File: custom_gateway.py

@@ -0,0 +1,154 @@
import os
import shutil
import subprocess
from time import sleep
from typing import List, Tuple

import streamlit.web.bootstrap
from jina import Gateway
from jina.serve.runtimes.gateway.composite import CompositeGateway
from streamlit.file_util import get_streamlit_file_path
from streamlit.web.server import Server as StreamlitServer

cur_dir = os.path.dirname(__file__)


def cmd(command, std_output=False, wait=True):
    if isinstance(command, str):
        command = command.split()
    if not std_output:
        process = subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
    else:
        process = subprocess.Popen(command)
    if wait:
        output, error = process.communicate()
        return output, error


class PlaygroundGateway(Gateway):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.streamlit_script = 'app.py'
        # copy playground/config.toml to the streamlit config.toml
        streamlit_config_toml_src = os.path.join(cur_dir, 'app_config.toml')
        streamlit_config_toml_dest = get_streamlit_file_path("config.toml")
        # create streamlit_config_toml_dest if it doesn't exist
        os.makedirs(os.path.dirname(streamlit_config_toml_dest), exist_ok=True)
        shutil.copyfile(streamlit_config_toml_src, streamlit_config_toml_dest)

    async def setup_server(self):
        streamlit.web.bootstrap._fix_sys_path(self.streamlit_script)
        streamlit.web.bootstrap._fix_matplotlib_crash()
        streamlit.web.bootstrap._fix_tornado_crash()
        streamlit.web.bootstrap._fix_sys_argv(self.streamlit_script, ())
        streamlit.web.bootstrap._fix_pydeck_mapbox_api_warning()
        streamlit_cmd = f'streamlit run {self.streamlit_script}'
        self.streamlit_server = StreamlitServer(
            os.path.join(cur_dir, self.streamlit_script), streamlit_cmd
        )

    async def run_server(self):
        await self.streamlit_server.start()
        streamlit.web.bootstrap._on_server_start(self.streamlit_server)
        streamlit.web.bootstrap._set_up_signal_handler(self.streamlit_server)

    async def shutdown(self):
        self.streamlit_server.stop()


class GatewayPositiveTweetModifierExecutor3163055(CompositeGateway):
    """The CustomGateway assumes that the gateway has been started with http on port 8080.
    This is the port on which the nginx process listens. After nginx has been started,
    it will start the playground on port 8501 and the actual HTTP gateway will start on port 8082.
    Nginx is configured to route the requests in the following way:
    - /playground -> playground on port 8501
    - / -> HTTP gateway on port 8082
    """

    def __init__(self, **kwargs):
        # need to update the port to 8082, as nginx will listen on 8080
        http_idx = 0
        http_port = kwargs['runtime_args']['port'][http_idx]
        if kwargs['runtime_args']['port'][http_idx] != 8080:
            raise ValueError(
                f'Please let the http port ({http_port}) be 8080 for nginx to work'
            )
        kwargs['runtime_args']['port'][http_idx] = 8082
        kwargs['cors'] = True
        super().__init__(**kwargs)

        # remove potentially clashing arguments from kwargs
        kwargs.pop("port", None)
        kwargs.pop("protocol", None)
        # note: order is important
        self._add_gateway(
            PlaygroundGateway,
            8501,
            **kwargs,
        )

        self.setup_nginx()
        self.nginx_was_shutdown = False

    async def shutdown(self):
        await super().shutdown()
        if not self.nginx_was_shutdown:
            self.shutdown_nginx()
            self.nginx_was_shutdown = True

    def setup_nginx(self):
        command = [
            'nginx',
            '-c',
            os.path.join(cur_dir, 'nginx.conf'),
        ]
        output, error = self._run_nginx_command(command)
        self.logger.info('Nginx started')
        self.logger.info(f'nginx output: {output}')
        self.logger.info(f'nginx error: {error}')

    def shutdown_nginx(self):
        command = ['nginx', '-s', 'stop']
        output, error = self._run_nginx_command(command)
        self.logger.info('Nginx stopped')
        self.logger.info(f'nginx output: {output}')
        self.logger.info(f'nginx error: {error}')

    def _run_nginx_command(self, command: List[str]) -> Tuple[bytes, bytes]:
        self.logger.info(f'Running command: {command}')
        output, error = cmd(command)
        if error != b'':
            # on CI we need to use sudo; using NOW_CI_RUN isn't good if running the test locally
            self.logger.info(f'nginx error: {error}')
            command.insert(0, 'sudo')
            self.logger.info(f'So running command: {command}')
            output, error = cmd(command)
        sleep(10)
        return output, error

    def _add_gateway(self, gateway_cls, port, protocol='http', **kwargs):
        # ignore metrics_registry since it is not copyable
        runtime_args = self._deepcopy_with_ignore_attrs(
            self.runtime_args,
            [
                'metrics_registry',
                'tracer_provider',
                'grpc_tracing_server_interceptors',
                'aio_tracing_client_interceptors',
                'tracing_client_interceptor',
                'monitoring',  # disable it for the fastapi gateway
            ],
        )
        runtime_args.port = [port]
        runtime_args.protocol = [protocol]
        gateway_kwargs = {k: v for k, v in kwargs.items() if k != 'runtime_args'}
        gateway_kwargs['runtime_args'] = dict(vars(runtime_args))
        gateway = gateway_cls(**gateway_kwargs)
        gateway.streamer = self.streamer
        self.gateways.insert(0, gateway)

View File: nginx.conf

@@ -0,0 +1,62 @@
events {
    worker_connections 4096; ## Default: 1024
}

http {
    server {
        listen 8080;
        server_name localhost;

        # from https://medium.com/@dasirra/using-streamlit-nginx-docker-to-build-and-put-in-production-dashboards-in-aws-lightsail-781dab8f2836
        location ^~ /static {
            proxy_pass http://localhost:8501/static/;
        }
        location ^~ /healthz {
            proxy_pass http://localhost:8501/healthz;
        }
        location ^~ /vendor {
            proxy_pass http://localhost:8501/vendor;
        }
        location ^~ /st-allowed-message-origins {
            proxy_pass http://localhost:8501/st-allowed-message-origins;
        }

        # for jcloud deployment, very important; actually talks via websocket
        location ^~ /stream {
            # inspired from https://discuss.streamlit.io/t/how-to-use-streamlit-with-nginx/378/7
            proxy_pass http://localhost:8501/stream;
            proxy_http_version 1.1;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header Host $host;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_read_timeout 86400;
        }
        location ^~ /favicon.png {
            proxy_pass http://localhost:8501/favicon.png;
        }

        # to make extra components work
        location ^~ /component {
            proxy_pass http://localhost:8501/component;
        }

        location /playground {
            # streamlit specific from https://discuss.streamlit.io/t/streamlit-docker-nginx-ssl-https/2195
            proxy_http_version 1.1;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header Host $host;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_read_timeout 86400;
            proxy_pass http://localhost:8501;
            client_max_body_size 50M;
        }

        location / {
            proxy_pass http://localhost:8082;
            client_max_body_size 50M;
        }
    }
}

View File: requirements.txt

@@ -0,0 +1,4 @@
streamlit==1.16.0
altair==4.2.2
extra-streamlit-components==0.1.55
jina==3.15.1.dev14

View File: microservice.py

@@ -0,0 +1,45 @@
# This microservice receives an API key for OpenAI (OPENAI_API_KEY) and a tweet containing potentially passive-aggressive language as input.
# It analyzes the input tweet using the OpenAI API to identify passive-aggressive language and modifies it to be more positive without changing the meaning.
# The microservice then returns the updated, positive version of the tweet as output.
import json
import os

import openai

from .apis import GPT_3_5_Turbo


def func(input_json: str) -> str:
    # Parse the input JSON string
    input_data = json.loads(input_json)
    # Extract the OpenAI API key from the input data and apply it
    openai.api_key = input_data["OPENAI_API_KEY"]
    # Log the update without echoing the key itself
    print('OpenAI API key updated')
    tweet = input_data["tweet"]
    # Initialize the GPT-3.5 Turbo API
    gpt_3_5_turbo = GPT_3_5_Turbo(
        system=f'''
You are an AI language model that can modify tweets to make them more positive without changing their meaning.
When you receive a tweet, you will return a JSON object containing the updated, positive version of the tweet.
Example:
Input tweet: "I can't believe you did that. It's so typical of you."
Output JSON: {{"positive_tweet": "I'm surprised you did that. It's just like you!"}}
''')
    # Generate the prompt for the GPT-3.5 Turbo API
    prompt = f"Input tweet: {tweet}\nOutput JSON:"
    # Call the GPT-3.5 Turbo API with the prompt
    generated_string = gpt_3_5_turbo(prompt)
    # Check if the generated_string is a valid JSON string
    try:
        output_data = json.loads(generated_string)
    except json.JSONDecodeError:
        # If the generated_string is not a valid JSON string, return an error message
        return json.dumps({"error": "Invalid JSON string generated by the GPT-3.5 Turbo API"})
    # Return the output JSON string
    return json.dumps(output_data)

View File: requirements.txt

@@ -0,0 +1,4 @@
jina==3.15.1.dev14
docarray==0.21.0
openai==0.27.5
pytest

View File

@@ -0,0 +1,5 @@
from jina import Flow

flow = Flow.load_config('flow.yml')

with flow:
    flow.block()
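
Once the flow is up, a request can be sent with the Jina client, mirroring send_request in app.py (placeholder key):

import json
from jina import Client, Document, DocumentArray

client = Client(host='http://localhost:8080')
doc = Document(text=json.dumps({
    "OPENAI_API_KEY": "sk-...",
    "tweet": "today is a bad day i dont like it"
}))
response = client.post('/', inputs=DocumentArray([doc]))
print(response[0].text)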

View File: test_microservice.py

@@ -0,0 +1,22 @@
# This test case checks if the output of the microservice is of type 'str' for the positive_tweet property.
# Since the output of the GPT-3.5 Turbo model is not deterministic, we cannot check for the exact output.
# Instead, we will test if the output is a valid JSON string and if the 'positive_tweet' property is a string.
import json

from .microservice import func


def test_positive_tweet_type():
    # Define the input JSON string
    input_json = json.dumps({
        "OPENAI_API_KEY": "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",
        "tweet": "I can't believe you did that. It's so typical of you."
    })
    # Call the microservice function with the input JSON string
    output_json = func(input_json)
    # Parse the output JSON string
    output_data = json.loads(output_json)
    # Check if the 'positive_tweet' property is a string
    assert isinstance(output_data["positive_tweet"], str)

View File

@@ -0,0 +1 @@
PositiveTweetModifierExecutor

View File

@@ -0,0 +1,5 @@
[
  ["openai"],
  ["openai", "transformers"],
  ["openai", "textblob"]
]

Binary file not shown (image, 84 KiB).