diff --git a/examples/rainbow_tweet/chrome_extension/content.js b/examples/rainbow_tweet/chrome_extension/content.js
new file mode 100644
index 0000000..32fb575
--- /dev/null
+++ b/examples/rainbow_tweet/chrome_extension/content.js
@@ -0,0 +1,77 @@
+console.log('Twitter Rewrite: Content script loaded');
+let openai_api_key = '';
+
+// Get OPENAI_API_KEY from chrome storage
+chrome.storage.sync.get({
+ openai_api_key: ''
+}, function(items) {
+ openai_api_key = items.openai_api_key;
+});
+let observer = new MutationObserver((mutations) => {
+ console.log('Twitter Rewrite: DOM mutation detected');
+ // For each mutation
+ mutations.forEach((mutation) => {
+ // If nodes were added
+ if (mutation.addedNodes) {
+ mutation.addedNodes.forEach((node) => {
+ // If the added node (or its descendants) contains a tweet
+ let tweets = node.querySelectorAll ? node.querySelectorAll('[data-testid="tweetText"]') : [];
+ tweets.forEach((tweet) => {
+ // If the tweet doesn't already have a modify button
+ if (!tweet.querySelector('.modify-button')) {
+ // Create new button
+ let button = document.createElement('button');
+ if (openai_api_key === '') {
+ button.innerText = 'Set OPENAI_API_KEY by clicking the extension icon';
+ button.disabled = true;
+ } else {
+ button.innerText = '🦄';
+ button.disabled = false;
+ }
+ button.className = 'modify-button';
+
+ // Capture the tweet text now, before the button label ('🦄'/'Loading...') is appended into the tweet node
+ let originalTweet = tweet.innerText;
+ // Add event listener for button click
+ button.addEventListener('click', function() {
+ this.disabled = true;
+ this.innerText = 'Loading...';
+ fetch('https://gptdeploy-61694dd6a3.wolf.jina.ai/post', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'accept': 'application/json'
+ },
+ body: JSON.stringify({
+ "data": [{"text": JSON.stringify({
+ "tweet": originalTweet,
+ "OPENAI_API_KEY": openai_api_key
+ }) }]
+ })
+ })
+ .then(response => response.json())
+ .then(data => {
+ let modifiedTweet = JSON.parse(data.data[0].text).positive_tweet;
+ let rainbowTweet = Array.from(modifiedTweet).map((char, i) =>
+ `<span class="rainbow-text" style="--i: ${i}">${char === '<' ? '&lt;' : char === '&' ? '&amp;' : char}</span>`
+ ).join('');
+
+ // Create a new element node to contain the HTML
+ let newTweet = document.createElement('span');
+ newTweet.innerHTML = rainbowTweet;
+ // Replace the old text node with the new element node
+ tweet.replaceWith(newTweet);
+ });
+ });
+
+ // Inject button into tweet
+ tweet.appendChild(button);
+ }
+ });
+ });
+ }
+ });
+});
+
+// Start observing the document with the configured parameters
+observer.observe(document.body, { childList: true, subtree: true });
diff --git a/examples/rainbow_tweet/chrome_extension/jina.png b/examples/rainbow_tweet/chrome_extension/jina.png
new file mode 100644
index 0000000..6fe6d80
Binary files /dev/null and b/examples/rainbow_tweet/chrome_extension/jina.png differ
diff --git a/examples/rainbow_tweet/chrome_extension/logo.png b/examples/rainbow_tweet/chrome_extension/logo.png
new file mode 100644
index 0000000..803fbea
Binary files /dev/null and b/examples/rainbow_tweet/chrome_extension/logo.png differ
diff --git a/examples/rainbow_tweet/chrome_extension/manifest.json b/examples/rainbow_tweet/chrome_extension/manifest.json
new file mode 100644
index 0000000..e2429db
--- /dev/null
+++ b/examples/rainbow_tweet/chrome_extension/manifest.json
@@ -0,0 +1,27 @@
+{
+ "manifest_version": 3,
+ "name": "Rainbow-Tweet",
+ "description": "The Rainbow-Tweet plugin allows the user to convert any tweet into positive language by clicking a button on the tweet.",
+ "version": "0.0.0.1",
+ "icons": {
+ "128": "logo.png"
+ },
+ "action": {
+ "default_icon": {
+ "128": "logo.png"
+ },
+ "default_title": "Configure API Key",
+ "default_popup": "popup.html"
+ },
+ "permissions": [
+ "storage"
+ ],
+ "content_scripts": [
+ {
+ "matches": ["https://twitter.com/*"],
+ "js": ["content.js"],
+ "css": ["styles.css"],
+ "run_at": "document_end"
+ }
+ ]
+}
diff --git a/examples/rainbow_tweet/chrome_extension/popup.css b/examples/rainbow_tweet/chrome_extension/popup.css
new file mode 100644
index 0000000..00e57df
--- /dev/null
+++ b/examples/rainbow_tweet/chrome_extension/popup.css
@@ -0,0 +1,37 @@
+body {
+ font-family: Arial, sans-serif;
+}
+
+.container {
+ width: 300px;
+ padding: 20px;
+}
+
+h1 {
+ color: #444;
+}
+
+.btn {
+ color: white;
+ background-color: #1da1f2;
+ border: none;
+ padding: 10px 20px;
+ margin-top: 10px;
+ cursor: pointer;
+}
+.footer {
+ margin-top: 20px;
+ text-align: center;
+}
+.btn:hover {
+ background-color: #0c84d2;
+}
+
+.form-group {
+ margin-bottom: 15px;
+}
+
+.form-text {
+ font-size: 0.875em;
+ color: #6c757d;
+}
diff --git a/examples/rainbow_tweet/chrome_extension/popup.html b/examples/rainbow_tweet/chrome_extension/popup.html
new file mode 100644
index 0000000..378bf6b
--- /dev/null
+++ b/examples/rainbow_tweet/chrome_extension/popup.html
@@ -0,0 +1,31 @@
+
+
+
+ Twitter Rewrite: Extension Options
+
+
+
+
+
Twitter Rewrite: Extension Options
+
+
+
+
+
+
+
diff --git a/examples/rainbow_tweet/chrome_extension/popup.js b/examples/rainbow_tweet/chrome_extension/popup.js
new file mode 100644
index 0000000..c2f6b68
--- /dev/null
+++ b/examples/rainbow_tweet/chrome_extension/popup.js
@@ -0,0 +1,35 @@
+// Saving options to chrome.storage
+function save_options() {
+ let openai_api_key = document.getElementById('openai_api_key').value;
+ chrome.storage.sync.set({
+ openai_api_key: openai_api_key
+ }, function() {
+ // Update status to let user know options were saved.
+ let status = document.getElementById('status');
+ status.textContent = 'Options saved.';
+ setTimeout(function() {
+ status.textContent = '';
+ }, 750);
+ });
+}
+
+// Restores options from chrome.storage
+function restore_options() {
+ chrome.storage.sync.get({
+ openai_api_key: ''
+ }, function(items) {
+ document.getElementById('openai_api_key').value = items.openai_api_key;
+ });
+}
+
+document.addEventListener('DOMContentLoaded', restore_options);
+document.getElementById('optionForm').addEventListener('submit', function(event) {
+ event.preventDefault();
+ save_options();
+});
+
+
+
+
+
+
diff --git a/examples/rainbow_tweet/chrome_extension/styles.css b/examples/rainbow_tweet/chrome_extension/styles.css
new file mode 100644
index 0000000..b2bf658
--- /dev/null
+++ b/examples/rainbow_tweet/chrome_extension/styles.css
@@ -0,0 +1,84 @@
+.modify-button {
+ background-color: #00acee; /* Twitter Blue */
+ color: white;
+ border: none;
+ padding: 5px 10px;
+ text-align: center;
+ text-decoration: none;
+ display: inline-block;
+ font-size: 16px;
+ margin: 4px 2px;
+ cursor: pointer;
+ border-radius: 3px;
+ box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.5); /* Add some shadow */
+}
+
+
+/*!* Dynamic rainbow colors for each letter *!*/
+/*@keyframes rainbow {*/
+/* 0% { color: hsl(0, 100%, 50%); }*/
+/* 14% { color: hsl(60, 100%, 50%); }*/
+/* 28% { color: hsl(120, 100%, 50%); }*/
+/* 42% { color: hsl(180, 100%, 50%); }*/
+/* 57% { color: hsl(240, 100%, 50%); }*/
+/* 71% { color: hsl(300, 100%, 50%); }*/
+/* 85% { color: hsl(360, 100%, 50%); }*/
+/* 100% { color: hsl(0, 100%, 50%); }*/
+/*}*/
+
+
+
+/*.rainbow-text {*/
+/* animation: rainbow 7s linear infinite;*/
+/* animation-delay: calc(.07s * var(--i));*/
+/*}*/
+
+/* Light mode colors (darker) */
+@keyframes rainbow-light {
+ 0% { color: hsl(0, 100%, 30%); }
+ 14% { color: hsl(60, 100%, 30%); }
+ 28% { color: hsl(120, 100%, 30%); }
+ 42% { color: hsl(180, 100%, 30%); }
+ 57% { color: hsl(240, 100%, 30%); }
+ 71% { color: hsl(300, 100%, 30%); }
+ 85% { color: hsl(360, 100%, 30%); }
+ 100% { color: hsl(0, 100%, 30%); }
+}
+
+/* Dark mode colors (brighter) */
+@keyframes rainbow-dark {
+ 0% { color: hsl(0, 100%, 70%); }
+ 14% { color: hsl(60, 100%, 70%); }
+ 28% { color: hsl(120, 100%, 70%); }
+ 42% { color: hsl(180, 100%, 70%); }
+ 57% { color: hsl(240, 100%, 70%); }
+ 71% { color: hsl(300, 100%, 70%); }
+ 85% { color: hsl(360, 100%, 70%); }
+ 100% { color: hsl(0, 100%, 70%); }
+}
+
+/* Apply light mode colors by default */
+.rainbow-text {
+ font-size: 200%;
+ animation: rainbow-light 7s linear infinite;
+ animation-delay: calc(.07s * var(--i));
+}
+
+/* Apply dark mode colors if user prefers dark mode */
+@media (prefers-color-scheme: dark) {
+ .rainbow-text {
+ animation: rainbow-dark 7s linear infinite;
+ animation-delay: calc(.07s * var(--i));
+ }
+}
+
+
+/*!* Rainbow colors for each letter *!*/
+/*!* Rainbow colors for each letter *!*/
+/*.rainbow0 { color: red; background-color: cyan; mix-blend-mode: difference; }*/
+/*.rainbow1 { color: orange; background-color: blue; mix-blend-mode: difference; }*/
+/*.rainbow2 { color: yellow; background-color: purple; mix-blend-mode: difference; }*/
+/*.rainbow3 { color: green; background-color: magenta; mix-blend-mode: difference; }*/
+/*.rainbow4 { color: blue; background-color: orange; mix-blend-mode: difference; }*/
+/*.rainbow5 { color: indigo; background-color: yellow; mix-blend-mode: difference; }*/
+/*.rainbow6 { color: violet; background-color: green; mix-blend-mode: difference; }*/
diff --git a/examples/rainbow_tweet/example_call.bash b/examples/rainbow_tweet/example_call.bash
new file mode 100644
index 0000000..9204f0b
--- /dev/null
+++ b/examples/rainbow_tweet/example_call.bash
@@ -0,0 +1 @@
+curl -X 'POST' 'https://gptdeploy-02e02e4150.wolf.jina.ai/post' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"data": [{"text": "{\"tweet\":\"today is a bad day i dont like it\"}"}]}'
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/__init__.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/Dockerfile b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/Dockerfile
new file mode 100644
index 0000000..c752b8e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/Dockerfile
@@ -0,0 +1,29 @@
+FROM jinaai/dev-gpt:0.0.6
+
+
+
+RUN apt-get install --no-install-recommends -y
+
+
+
+## install requirements for the executor
+
+COPY requirements.txt .
+
+RUN pip -v install --compile -r requirements.txt
+
+
+
+# setup the workspace
+
+COPY . /workdir/
+
+WORKDIR /workdir
+
+
+
+RUN pytest test_microservice.py
+
+
+
+ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/__init__.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/__init__.py
new file mode 100644
index 0000000..8a4eb04
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/__init__.py
@@ -0,0 +1,15 @@
+from jina import Executor, requests as jina_requests, DocumentArray
+import json
+
+from .microservice import func
+
+
+class PositiveTweetModifierExecutor3163055(Executor):
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ @jina_requests()
+ def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
+ for d in docs:
+ d.text = func(d.text)
+ return docs
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/apis.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/apis.py
new file mode 100644
index 0000000..24dcb01
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/apis.py
@@ -0,0 +1,23 @@
+import os
+import openai
+
+
+openai.api_key = os.getenv("OPENAI_API_KEY")
+
+
+class GPT_3_5_Turbo:
+ def __init__(self, system: str = ''):
+ self.system = system
+
+ def __call__(self, prompt: str) -> str:
+ response = openai.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ messages=[{
+ "role": 'system',
+ "content": self.system
+ }, {
+ "role": 'user',
+ "content": prompt
+ }]
+ )
+ return response.choices[0]['message']['content']
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/config.yml b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/config.yml
new file mode 100644
index 0000000..36e015e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/config.yml
@@ -0,0 +1,5 @@
+jtype: PositiveTweetModifierExecutor3163055
+py_modules:
+ - __init__.py
+metas:
+ name: PositiveTweetModifierExecutor3163055
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/microservice.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/microservice.py
new file mode 100644
index 0000000..8d0b8d1
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/microservice.py
@@ -0,0 +1,37 @@
+# This microservice receives an API key for OpenAI (OPENAI_API_KEY) and a tweet containing potentially passive aggressive language as input.
+# It analyzes the input tweet using the OpenAI API to identify passive aggressive language and modifies the language to make it more positive without changing the meaning.
+# The microservice then returns the updated, positive version of the tweet as output.
+
+from .apis import GPT_3_5_Turbo
+import json
+
+
+def func(input_json: str) -> str:
+ # Parse the input JSON string
+ input_data = json.loads(input_json)
+
+ # Extract the OpenAI API key and tweet from the input data
+ openai_api_key = input_data["OPENAI_API_KEY"]
+ tweet = input_data["tweet"]
+
+ # Initialize the GPT-3.5 Turbo API
+ gpt_3_5_turbo = GPT_3_5_Turbo(
+ system=f'''
+You are an AI language model that can modify tweets to make them more positive without changing their meaning.
+When you receive a tweet, you will return a JSON object containing the updated, positive version of the tweet.
+Example:
+Input tweet: "I can't believe you did that. It's so typical of you."
+Output JSON: {{"positive_tweet": "I'm surprised you did that. It's just like you!"}}
+''')
+
+ # Generate the prompt for the GPT-3.5 Turbo API
+ prompt = f"Input tweet: {tweet}"
+
+ # Call the GPT-3.5 Turbo API with the prompt
+ generated_string = gpt_3_5_turbo(prompt)
+
+ # Parse the generated JSON string
+ output_data = json.loads(generated_string)
+
+ # Return the output JSON string
+ return json.dumps(output_data)
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/requirements.txt b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/requirements.txt
new file mode 100644
index 0000000..054deb5
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/requirements.txt
@@ -0,0 +1,4 @@
+jina==3.15.1.dev14
+docarray==0.21.0
+openai==0.27.5
+pytest
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/test_microservice.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/test_microservice.py
new file mode 100644
index 0000000..95c276f
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v1/test_microservice.py
@@ -0,0 +1,22 @@
+# This test case checks if the output of the microservice is of type 'str' for the positive_tweet property.
+# Since the output of the GPT-3.5 Turbo model is not deterministic, we cannot check for the exact output.
+# Instead, we will test if the output is a valid JSON string and if the 'positive_tweet' property is a string.
+
+from .microservice import func
+import json
+
+def test_positive_tweet_type():
+ # Define the input JSON string
+ input_json = json.dumps({
+        "OPENAI_API_KEY": "YOUR_OPENAI_API_KEY",
+ "tweet": "I can't believe you did that. It's so typical of you."
+ })
+
+ # Call the microservice function with the input JSON string
+ output_json = func(input_json)
+
+ # Parse the output JSON string
+ output_data = json.loads(output_json)
+
+ # Check if the 'positive_tweet' property is a string
+ assert isinstance(output_data["positive_tweet"], str)
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/Dockerfile b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/Dockerfile
new file mode 100644
index 0000000..c752b8e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/Dockerfile
@@ -0,0 +1,29 @@
+FROM jinaai/dev-gpt:0.0.6
+
+
+
+RUN apt-get install --no-install-recommends -y
+
+
+
+## install requirements for the executor
+
+COPY requirements.txt .
+
+RUN pip -v install --compile -r requirements.txt
+
+
+
+# setup the workspace
+
+COPY . /workdir/
+
+WORKDIR /workdir
+
+
+
+RUN pytest test_microservice.py
+
+
+
+ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/__init__.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/__init__.py
new file mode 100644
index 0000000..8a4eb04
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/__init__.py
@@ -0,0 +1,15 @@
+from jina import Executor, requests as jina_requests, DocumentArray
+import json
+
+from .microservice import func
+
+
+class PositiveTweetModifierExecutor3163055(Executor):
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ @jina_requests()
+ def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
+ for d in docs:
+ d.text = func(d.text)
+ return docs
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/apis.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/apis.py
new file mode 100644
index 0000000..24dcb01
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/apis.py
@@ -0,0 +1,23 @@
+import os
+import openai
+
+
+openai.api_key = os.getenv("OPENAI_API_KEY")
+
+
+class GPT_3_5_Turbo:
+ def __init__(self, system: str = ''):
+ self.system = system
+
+ def __call__(self, prompt: str) -> str:
+ response = openai.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ messages=[{
+ "role": 'system',
+ "content": self.system
+ }, {
+ "role": 'user',
+ "content": prompt
+ }]
+ )
+ return response.choices[0]['message']['content']
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/config.yml b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/config.yml
new file mode 100644
index 0000000..36e015e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/config.yml
@@ -0,0 +1,5 @@
+jtype: PositiveTweetModifierExecutor3163055
+py_modules:
+ - __init__.py
+metas:
+ name: PositiveTweetModifierExecutor3163055
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/microservice.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/microservice.py
new file mode 100644
index 0000000..06bd4a0
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/microservice.py
@@ -0,0 +1,41 @@
+# This microservice receives an API key for OpenAI (OPENAI_API_KEY) and a tweet containing potentially passive aggressive language as input.
+# It analyzes the input tweet using the OpenAI API to identify passive aggressive language and modifies the language to make it more positive without changing the meaning.
+# The microservice then returns the updated, positive version of the tweet as output.
+
+from .apis import GPT_3_5_Turbo
+import json
+
+
+def func(input_json: str) -> str:
+ # Parse the input JSON string
+ input_data = json.loads(input_json)
+
+ # Extract the OpenAI API key and tweet from the input data
+ openai_api_key = input_data["OPENAI_API_KEY"]
+ tweet = input_data["tweet"]
+
+ # Initialize the GPT-3.5 Turbo API
+ gpt_3_5_turbo = GPT_3_5_Turbo(
+ system=f'''
+You are an AI language model that can modify tweets to make them more positive without changing their meaning.
+When you receive a tweet, you will return a JSON object containing the updated, positive version of the tweet.
+Example:
+Input tweet: "I can't believe you did that. It's so typical of you."
+Output JSON: {{"positive_tweet": "I'm surprised you did that. It's just like you!"}}
+''')
+
+ # Generate the prompt for the GPT-3.5 Turbo API
+ prompt = f"Input tweet: {tweet}"
+
+ # Call the GPT-3.5 Turbo API with the prompt
+ generated_string = gpt_3_5_turbo(prompt)
+
+ # Check if the generated_string is a valid JSON string
+ try:
+ output_data = json.loads(generated_string)
+ except json.JSONDecodeError:
+ # If the generated_string is not a valid JSON string, return an error message
+ return json.dumps({"error": "Invalid JSON string generated by the GPT-3.5 Turbo API"})
+
+ # Return the output JSON string
+ return json.dumps(output_data)
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/requirements.txt b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/requirements.txt
new file mode 100644
index 0000000..054deb5
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/requirements.txt
@@ -0,0 +1,4 @@
+jina==3.15.1.dev14
+docarray==0.21.0
+openai==0.27.5
+pytest
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/test_microservice.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/test_microservice.py
new file mode 100644
index 0000000..95c276f
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v2/test_microservice.py
@@ -0,0 +1,22 @@
+# This test case checks if the output of the microservice is of type 'str' for the positive_tweet property.
+# Since the output of the GPT-3.5 Turbo model is not deterministic, we cannot check for the exact output.
+# Instead, we will test if the output is a valid JSON string and if the 'positive_tweet' property is a string.
+
+from .microservice import func
+import json
+
+def test_positive_tweet_type():
+ # Define the input JSON string
+ input_json = json.dumps({
+        "OPENAI_API_KEY": "YOUR_OPENAI_API_KEY",
+ "tweet": "I can't believe you did that. It's so typical of you."
+ })
+
+ # Call the microservice function with the input JSON string
+ output_json = func(input_json)
+
+ # Parse the output JSON string
+ output_data = json.loads(output_json)
+
+ # Check if the 'positive_tweet' property is a string
+ assert isinstance(output_data["positive_tweet"], str)
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/Dockerfile b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/Dockerfile
new file mode 100644
index 0000000..c752b8e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/Dockerfile
@@ -0,0 +1,29 @@
+FROM jinaai/dev-gpt:0.0.6
+
+
+
+RUN apt-get install --no-install-recommends -y
+
+
+
+## install requirements for the executor
+
+COPY requirements.txt .
+
+RUN pip -v install --compile -r requirements.txt
+
+
+
+# setup the workspace
+
+COPY . /workdir/
+
+WORKDIR /workdir
+
+
+
+RUN pytest test_microservice.py
+
+
+
+ENTRYPOINT ["jina", "executor", "--uses", "config.yml"]
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/__init__.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/__init__.py
new file mode 100644
index 0000000..8a4eb04
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/__init__.py
@@ -0,0 +1,15 @@
+from jina import Executor, requests as jina_requests, DocumentArray
+import json
+
+from .microservice import func
+
+
+class PositiveTweetModifierExecutor3163055(Executor):
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ @jina_requests()
+ def endpoint(self, docs: DocumentArray, **kwargs) -> DocumentArray:
+ for d in docs:
+ d.text = func(d.text)
+ return docs
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/apis.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/apis.py
new file mode 100644
index 0000000..89a0768
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/apis.py
@@ -0,0 +1,23 @@
+import os
+import openai
+
+
+
+
+
+class GPT_3_5_Turbo:
+ def __init__(self, system: str = ''):
+ self.system = system
+
+ def __call__(self, prompt: str) -> str:
+ response = openai.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ messages=[{
+ "role": 'system',
+ "content": self.system
+ }, {
+ "role": 'user',
+ "content": prompt
+ }]
+ )
+ return response.choices[0]['message']['content']
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/config.yml b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/config.yml
new file mode 100644
index 0000000..36e015e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/config.yml
@@ -0,0 +1,5 @@
+jtype: PositiveTweetModifierExecutor3163055
+py_modules:
+ - __init__.py
+metas:
+ name: PositiveTweetModifierExecutor3163055
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/flow.yml b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/flow.yml
new file mode 100644
index 0000000..0eda86a
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/flow.yml
@@ -0,0 +1,20 @@
+jtype: Flow
+with:
+ port: 8080
+ protocol: http
+jcloud:
+ version: 3.15.1.dev14
+ labels:
+ creator: microchain
+ name: gptdeploy
+gateway:
+ uses: jinaai+docker://auth0-unified-448f11965ce142b6/GatewayPositiveTweetModifierExecutor3163055:latest
+
+executors:
+ - name: positivetweetmodifierexecutor3163055
+ uses: jinaai+docker://auth0-unified-448f11965ce142b6/PositiveTweetModifierExecutor3163055:latest
+
+ jcloud:
+ resources:
+ instance: C2
+ capacity: spot
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/Dockerfile b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/Dockerfile
new file mode 100644
index 0000000..660b5d7
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/Dockerfile
@@ -0,0 +1,14 @@
+FROM jinaai/jina:3.15.1-dev14-py39-standard
+
+
+RUN apt-get update && apt-get install --no-install-recommends -y git pip nginx && rm -rf /var/lib/apt/lists/*
+
+## install requirements for the executor
+COPY requirements.txt .
+RUN pip install --compile -r requirements.txt
+
+# setup the workspace
+COPY . /workdir/
+WORKDIR /workdir
+
+ENTRYPOINT ["jina", "gateway", "--uses", "config.yml"]
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/__init__.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/app.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/app.py
new file mode 100644
index 0000000..bb2c11e
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/app.py
@@ -0,0 +1,58 @@
+import json
+import os
+import streamlit as st
+from jina import Client, Document, DocumentArray
+
+# Set the favicon and title
+st.set_page_config(
+ page_title="Positive Tweet Modifier",
+ page_icon=":smiley:",
+ layout="wide",
+)
+
+# Define the input dictionary
+INPUT_DICTIONARY = {
+ "OPENAI_API_KEY": "",
+ "tweet": "I can't believe you did that. It's so typical of you.",
+}
+
+# Define the function to send a request to the microservice
+def send_request(input_dict):
+ client = Client(host='http://localhost:8080')
+ d = Document(text=json.dumps(input_dict))
+ response = client.post('/', inputs=DocumentArray([d]))
+ return response[0].text
+
+# Create the UI
+st.title("Positive Tweet Modifier :speech_balloon:")
+st.write("Transform negative tweets into positive ones using GPT-3.5 Turbo! :sunglasses:")
+
+# Input form
+st.subheader("Input")
+tweet = st.text_area("Enter a negative tweet:", value=INPUT_DICTIONARY["tweet"], height=100)
+api_key = st.text_input("Enter your OPENAI_API_KEY:", value=INPUT_DICTIONARY["OPENAI_API_KEY"])
+
+# Send request button
+if st.button("Transform Tweet"):
+ INPUT_DICTIONARY["tweet"] = tweet
+ INPUT_DICTIONARY["OPENAI_API_KEY"] = api_key
+ response_text = send_request(INPUT_DICTIONARY)
+ response_data = json.loads(response_text)
+
+ # Display the result
+ st.subheader("Result")
+ st.write(f"Positive Tweet: {response_data['positive_tweet']} :thumbsup:")
+
+# Deploy your own microservice
+st.markdown(
+ "Want to deploy your own microservice? [Click here!](https://github.com/jina-ai/dev-gpt)"
+)
+
+# Display the curl command
+deployment_id = os.environ.get("K8S_NAMESPACE_NAME", "")
+host = f'https://dev-gpt-{deployment_id.split("-")[1]}.wolf.jina.ai/post' if deployment_id else "http://localhost:8080/post"
+with st.expander("See curl command"):
+ st.code(
+ f'curl -X \'POST\' \'{host}\' -H \'accept: application/json\' -H \'Content-Type: application/json\' -d \'{{"data": [{{"text": "hello, world!"}}]}}\'',
+ language='bash'
+ )
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/app_config.toml b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/app_config.toml
new file mode 100644
index 0000000..24ef3ce
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/app_config.toml
@@ -0,0 +1,4 @@
+[server]
+
+baseUrlPath = "/playground"
+headless = true
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/config.yml b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/config.yml
new file mode 100644
index 0000000..5357216
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/config.yml
@@ -0,0 +1,5 @@
+jtype: GatewayPositiveTweetModifierExecutor3163055
+py_modules:
+ - custom_gateway.py
+metas:
+ name: GatewayPositiveTweetModifierExecutor3163055
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/custom_gateway.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/custom_gateway.py
new file mode 100644
index 0000000..d6292f7
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/custom_gateway.py
@@ -0,0 +1,154 @@
+import os
+import shutil
+import subprocess
+from time import sleep
+from typing import List, Tuple
+
+import streamlit.web.bootstrap
+from jina import Gateway
+from jina.serve.runtimes.gateway.composite import CompositeGateway
+from streamlit.file_util import get_streamlit_file_path
+from streamlit.web.server import Server as StreamlitServer
+
+
+cur_dir = os.path.dirname(__file__)
+
+
def cmd(command, std_output=False, wait=True):
    """Run an external command via ``subprocess``.

    Args:
        command: Either a whitespace-separated command string or a list of
            argument tokens.
        std_output: If True, let the child inherit this process's
            stdout/stderr instead of capturing them into pipes.
        wait: If True, block until the process exits.

    Returns:
        When ``wait`` is True, a ``(stdout, stderr)`` tuple of bytes (both
        ``None`` if ``std_output`` is True). When ``wait`` is False, the
        running ``subprocess.Popen`` handle so the caller can manage/reap
        the child.
    """
    if isinstance(command, str):
        command = command.split()
    if not std_output:
        process = subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
    else:
        process = subprocess.Popen(command)
    if wait:
        output, error = process.communicate()
        return output, error
    # Bug fix: previously the handle was silently dropped (implicit None
    # return) when wait=False, leaving the spawned process unmanageable.
    return process
+
+
class PlaygroundGateway(Gateway):
    """Jina Gateway that serves the Streamlit playground UI (``app.py``).

    On construction it installs the bundled Streamlit config
    (``app_config.toml``) into the user's Streamlit config location so the
    server picks up the ``/playground`` base path and headless mode.

    NOTE(review): ``setup_server`` calls private ``streamlit.web.bootstrap``
    helpers — this is tied to the pinned streamlit==1.16.0 and may break on
    upgrade.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Streamlit script to serve; resolved relative to this file's dir.
        self.streamlit_script = 'app.py'
        # copy playground/config.toml to streamlit config.toml
        streamlit_config_toml_src = os.path.join(cur_dir, 'app_config.toml')
        streamlit_config_toml_dest = get_streamlit_file_path("config.toml")
        # create streamlit_config_toml_dest if it doesn't exist
        os.makedirs(os.path.dirname(streamlit_config_toml_dest), exist_ok=True)
        shutil.copyfile(streamlit_config_toml_src, streamlit_config_toml_dest)

    async def setup_server(self):
        # Replicate the environment fixes streamlit's CLI bootstrap performs
        # before constructing the server (private API, order as upstream).
        streamlit.web.bootstrap._fix_sys_path(self.streamlit_script)
        streamlit.web.bootstrap._fix_matplotlib_crash()
        streamlit.web.bootstrap._fix_tornado_crash()
        streamlit.web.bootstrap._fix_sys_argv(self.streamlit_script, ())
        streamlit.web.bootstrap._fix_pydeck_mapbox_api_warning()
        streamlit_cmd = f'streamlit run {self.streamlit_script}'

        self.streamlit_server = StreamlitServer(
            os.path.join(cur_dir, self.streamlit_script), streamlit_cmd
        )

    async def run_server(self):
        # Start serving and register streamlit's own lifecycle hooks.
        await self.streamlit_server.start()
        streamlit.web.bootstrap._on_server_start(self.streamlit_server)
        streamlit.web.bootstrap._set_up_signal_handler(self.streamlit_server)

    async def shutdown(self):
        # Stop the embedded Streamlit server.
        self.streamlit_server.stop()
+
+
class GatewayPositiveTweetModifierExecutor3163055(CompositeGateway):
    """The CustomGateway assumes that the gateway has been started with http on port 8080.
    This is the port on which the nginx process listens. After nginx has been started,
    it will start the playground on port 8501 and the actual HTTP gateway will start on port 8082.

    Nginx is configured to route the requests in the following way:
    - /playground -> playground on port 8501
    - / -> HTTP gateway on port 8082
    """

    def __init__(self, **kwargs):
        # need to update port to 8082, as nginx will listen on 8080
        http_idx = 0
        http_port = kwargs['runtime_args']['port'][http_idx]
        if kwargs['runtime_args']['port'][http_idx] != 8080:
            raise ValueError(
                f'Please, let http port ({http_port}) be 8080 for nginx to work'
            )
        # Rebind the HTTP gateway to 8082 *before* the parent constructor
        # reads the runtime args; nginx then owns 8080.
        kwargs['runtime_args']['port'][http_idx] = 8082
        kwargs['cors'] = True
        super().__init__(**kwargs)

        # remove potential clashing arguments from kwargs
        kwargs.pop("port", None)
        kwargs.pop("protocol", None)

        # note order is important
        self._add_gateway(
            PlaygroundGateway,
            8501,
            **kwargs,
        )

        self.setup_nginx()
        # Guard flag so nginx is only stopped once even if shutdown() runs twice.
        self.nginx_was_shutdown = False

    async def shutdown(self):
        await super().shutdown()
        if not self.nginx_was_shutdown:
            self.shutdown_nginx()
            self.nginx_was_shutdown = True

    def setup_nginx(self):
        # Launch nginx with the bundled config that fronts both servers.
        command = [
            'nginx',
            '-c',
            os.path.join(cur_dir, '', 'nginx.conf'),
        ]
        output, error = self._run_nginx_command(command)
        self.logger.info('Nginx started')
        self.logger.info(f'nginx output: {output}')
        self.logger.info(f'nginx error: {error}')

    def shutdown_nginx(self):
        # 'nginx -s stop' signals the running master process to exit.
        command = ['nginx', '-s', 'stop']
        output, error = self._run_nginx_command(command)
        self.logger.info('Nginx stopped')
        self.logger.info(f'nginx output: {output}')
        self.logger.info(f'nginx error: {error}')

    def _run_nginx_command(self, command: List[str]) -> Tuple[bytes, bytes]:
        # Run the command; on any stderr output, retry once with sudo
        # (anything on stderr is treated as failure — NOTE(review): nginx
        # also writes warnings to stderr, which would trigger the retry).
        self.logger.info(f'Running command: {command}')
        output, error = cmd(command)
        if error != b'':
            # on CI we need to use sudo; using NOW_CI_RUN isn't good if running test locally
            self.logger.info(f'nginx error: {error}')
            command.insert(0, 'sudo')
            self.logger.info(f'So running command: {command}')
            output, error = cmd(command)
            sleep(10)
        return output, error

    def _add_gateway(self, gateway_cls, port, protocol='http', **kwargs):
        # Clone this gateway's runtime args for the sub-gateway, then rebind
        # its port/protocol and prepend it to the composite's gateway list.
        # ignore metrics_registry since it is not copyable
        runtime_args = self._deepcopy_with_ignore_attrs(
            self.runtime_args,
            [
                'metrics_registry',
                'tracer_provider',
                'grpc_tracing_server_interceptors',
                'aio_tracing_client_interceptors',
                'tracing_client_interceptor',
                'monitoring',  # disable it for fastapi gateway
            ],
        )
        runtime_args.port = [port]
        runtime_args.protocol = [protocol]
        gateway_kwargs = {k: v for k, v in kwargs.items() if k != 'runtime_args'}
        gateway_kwargs['runtime_args'] = dict(vars(runtime_args))
        gateway = gateway_cls(**gateway_kwargs)
        gateway.streamer = self.streamer
        self.gateways.insert(0, gateway)
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/nginx.conf b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/nginx.conf
new file mode 100644
index 0000000..e44f98d
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/nginx.conf
@@ -0,0 +1,62 @@
+events {
+ worker_connections 4096; ## Default: 1024
+}
+
+http {
+ server {
+ listen 8080;
+ server_name localhost;
+
+
+ # from https://medium.com/@dasirra/using-streamlit-nginx-docker-to-build-and-put-in-production-dashboards-in-aws-lightsail-781dab8f2836
+ location ^~ /static {
+ proxy_pass http://localhost:8501/static/;
+ }
+ location ^~ /healthz {
+ proxy_pass http://localhost:8501/healthz;
+ }
+ location ^~ /vendor {
+ proxy_pass http://localhost:8501/vendor;
+ }
+ location ^~ /st-allowed-message-origins {
+ proxy_pass http://localhost:8501/st-allowed-message-origins;
+ }
+
+ # for jcloud deployment, very important; actually talks via websocket
+ location ^~ /stream {
+ # inspired from https://discuss.streamlit.io/t/how-to-use-streamlit-with-nginx/378/7
+ proxy_pass http://localhost:8501/stream;
+ proxy_http_version 1.1;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header Host $host;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_read_timeout 86400;
+ }
+ location ^~ /favicon.png {
+ proxy_pass http://localhost:8501/favicon.png;
+ }
+ # to make extra components work
+ location ^~ /component {
+ proxy_pass http://localhost:8501/component;
+ }
+
+ location /playground {
+ # streamlit specific from https://discuss.streamlit.io/t/streamlit-docker-nginx-ssl-https/2195
+ proxy_http_version 1.1;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header Host $host;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_read_timeout 86400;
+ proxy_pass http://localhost:8501;
+ client_max_body_size 50M;
+ }
+
+ location / {
+ proxy_pass http://localhost:8082;
+ client_max_body_size 50M;
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/requirements.txt b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/requirements.txt
new file mode 100644
index 0000000..a5ab956
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/gateway/requirements.txt
@@ -0,0 +1,4 @@
+streamlit==1.16.0
+altair==4.2.2
+extra-streamlit-components==0.1.55
+jina==3.15.1.dev14
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/microservice.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/microservice.py
new file mode 100644
index 0000000..a9cded0
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/microservice.py
@@ -0,0 +1,45 @@
+# This microservice receives an API key for OpenAI (OPENAI_API_KEY) and a tweet containing potentially passive aggressive language as input.
+# It analyzes the input tweet using the OpenAI API to identify passive aggressive language and modifies the language to make it more positive without changing the meaning.
+# The microservice then returns the updated, positive version of the tweet as output.
+import os
+
+import openai
+
+from .apis import GPT_3_5_Turbo
+import json
+
+
def func(input_json: str) -> str:
    """Rewrite a potentially passive-aggressive tweet into a positive one.

    Args:
        input_json: A JSON string with two keys: "OPENAI_API_KEY" (the
            caller's OpenAI API key) and "tweet" (the tweet text to rewrite).

    Returns:
        A JSON string containing {"positive_tweet": ...} on success, or
        {"error": ...} when the model's reply is not valid JSON.
    """
    # Parse the input JSON string
    input_data = json.loads(input_json)

    # Configure the OpenAI client with the caller-supplied key.
    # SECURITY FIX: never log the key itself — the previous version printed
    # it verbatim, leaking the secret into service logs.
    openai.api_key = input_data["OPENAI_API_KEY"]
    print('OpenAI API key updated')
    tweet = input_data["tweet"]

    # Initialize the GPT-3.5 Turbo API with a system prompt that pins the
    # reply format to a single-key JSON object.
    gpt_3_5_turbo = GPT_3_5_Turbo(
        system=f'''
You are an AI language model that can modify tweets to make them more positive without changing their meaning.
When you receive a tweet, you will return a JSON object containing the updated, positive version of the tweet.
Example:
Input tweet: "I can't believe you did that. It's so typical of you."
Output JSON: {{"positive_tweet": "I'm surprised you did that. It's just like you!"}}
''')

    # Generate the prompt for the GPT-3.5 Turbo API
    prompt = f"Input tweet: {tweet}\nOutput JSON:"

    # Call the GPT-3.5 Turbo API with the prompt
    generated_string = gpt_3_5_turbo(prompt)

    # The model is not guaranteed to emit valid JSON; guard the parse.
    try:
        output_data = json.loads(generated_string)
    except json.JSONDecodeError:
        # If the generated_string is not a valid JSON string, return an error message
        return json.dumps({"error": "Invalid JSON string generated by the GPT-3.5 Turbo API"})

    # Return the output JSON string
    return json.dumps(output_data)
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/requirements.txt b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/requirements.txt
new file mode 100644
index 0000000..054deb5
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/requirements.txt
@@ -0,0 +1,4 @@
+jina==3.15.1.dev14
+docarray==0.21.0
+openai==0.27.5
+pytest
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/run_flow.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/run_flow.py
new file mode 100644
index 0000000..560e20c
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/run_flow.py
@@ -0,0 +1,5 @@
from jina import Flow

# Bring up the microservice Flow described in flow.yml and serve requests
# until the process is interrupted; the context manager tears it down.
with Flow.load_config('flow.yml') as flow:
    flow.block()
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/test_microservice.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/test_microservice.py
new file mode 100644
index 0000000..95c276f
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/0_gpt_3_5_turbo/v3/test_microservice.py
@@ -0,0 +1,22 @@
+# This test case checks if the output of the microservice is of type 'str' for the positive_tweet property.
+# Since the output of the GPT-3.5 Turbo model is not deterministic, we cannot check for the exact output.
+# Instead, we will test if the output is a valid JSON string and if the 'positive_tweet' property is a string.
+
+from .microservice import func
+import json
+
def test_positive_tweet_type():
    """Check the microservice returns a string 'positive_tweet' field.

    The GPT-3.5 Turbo output is non-deterministic, so only the type of the
    result is asserted, not its exact value.

    SECURITY FIX: the OpenAI key is read from the OPENAI_API_KEY environment
    variable instead of being committed to the repository as a live secret
    (the previous version hard-coded an 'sk-...' key, which must be revoked).
    """
    import os

    # Define the input JSON string
    input_json = json.dumps({
        "OPENAI_API_KEY": os.environ.get("OPENAI_API_KEY", ""),
        "tweet": "I can't believe you did that. It's so typical of you."
    })

    # Call the microservice function with the input JSON string
    output_json = func(input_json)

    # Parse the output JSON string
    output_data = json.loads(output_json)

    # Check if the 'positive_tweet' property is a string
    assert isinstance(output_data["positive_tweet"], str)
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/__init__.py b/examples/rainbow_tweet/microservice/PositiveTweetModifierExecutor3163055/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/rainbow_tweet/microservice/__init__.py b/examples/rainbow_tweet/microservice/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/rainbow_tweet/microservice/name.txt b/examples/rainbow_tweet/microservice/name.txt
new file mode 100644
index 0000000..87ac147
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/name.txt
@@ -0,0 +1 @@
+PositiveTweetModifierExecutor
\ No newline at end of file
diff --git a/examples/rainbow_tweet/microservice/strategies.json b/examples/rainbow_tweet/microservice/strategies.json
new file mode 100644
index 0000000..f2004cd
--- /dev/null
+++ b/examples/rainbow_tweet/microservice/strategies.json
@@ -0,0 +1,5 @@
+[
+ ["openai"],
+ ["openai", "transformers"],
+ ["openai", "textblob"]
+]
\ No newline at end of file
diff --git a/examples/rainbow_tweet/screenshot.png b/examples/rainbow_tweet/screenshot.png
new file mode 100644
index 0000000..767db0e
Binary files /dev/null and b/examples/rainbow_tweet/screenshot.png differ