Mirror of https://github.com/aljazceru/hummingbot-dashboard.git (synced 2025-12-23 00:04:25 +01:00)
(feat) added all pages
README.md (20 changed lines)
@@ -1,30 +1,28 @@
# Hummingbot Dashboard

Hummingbot Dashboard is a community project to build dashboards that help you deploy, manage, backtest, and analyze Hummingbot instances (and much more!). Each dashboard is maintained by 1-2 community members.

Collection of data visualization and analysis Hummingbot-related dashboards. The dashboards help you run and manage Hummingbot, analyze performance, analyze trade data, and much more!

Dashboard is built using [StreamLit](https://streamlit.io/) and uses the Conda environment & package manager to simplify installation, updates, and dependency management.

## Dashboards

### Dashboards

Here are the current dashboards in the collection:

* 🚀 **Strategy Performance**: Analyze the performance of a running Hummingbot instance
* 🐙 **Bot Orchestration**: Deploy and manage Hummingbot instances
* ⚙️ **Backtesting**: Deploy and manage backtests of directional strategies
* ⚙️ **Backtest Manager**: Deploy and manage backtests of directional strategies
* 🗂 **Candles Downloader**: Download historical exchange data as OHLCV candles. Supports multiple trading pairs and custom time ranges/intervals.
* 🧳 **Database Inspector**: Inspect and analyze the orders and trades data contained in a Hummingbot strategy database
* 🔍 **DB Inspector**: Inspect and analyze the orders and trades data contained in a Hummingbot strategy database
* 🧙 **Token Spreads**: Identify cross-exchange trading opportunities by analyzing differences in token spreads across venues
* 🦉 **TVL vs MCAPs**: Easily compare various DeFi protocols based on their market capitalization and total value locked, using DeFiLlama data.

## Installation

### Installation

This project is built using [StreamLit](https://streamlit.io/) and uses Anaconda and Docker to simplify installation, updates, and dependency management.

See [Installation](https://github.com/hummingbot/dashboard/blob/feat/base-page-class/INSTALLATION.md) for how to install and update the dashboard.

## Contributions

### Contributions

We welcome contributions from the community! See [Contributing](https://github.com/hummingbot/dashboard/blob/feat/base-page-class/CONTRIBUTING.md) for more information.

## Participation

### Meetings

We hold bi-weekly, livestreamed Dashboard project meetings. You can participate on our [Discord](https://discord.gg/hummingbot).

* Alternating Wednesdays, 3pm GMT / 11am EST / 8am PST / 11pm SIN
main.py (6 changed lines)
@@ -22,7 +22,11 @@ show_pages(
        Page("main.py", "Hummingbot Dashboard", "📊"),
        Page("pages/strategy_performance/app.py", "Strategy Performance", "🚀"),
        Page("pages/bot_orchestration/app.py", "Bot Orchestration", "🐙"),
        Page("pages/backtesting/app.py", "Backtesting", "⚙️"),
        Page("pages/backtest_manager/app.py", "Backtest Manager", "⚙️"),
        Page("pages/candles_downloader/app.py", "Candles Downloader", "🗂"),
        Page("pages/db_inspector/app.py", "DB Inspector", "🔍"),
        Page("pages/token_spreads/app.py", "Token Spreads", "🧙"),
        Page("pages/tvl_vs_mcap/app.py", "TVL vs Market Cap", "🦉"),
    ]
)
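For readers skimming the diff, here is a minimal, self-contained sketch of how main.py registers these pages with the st_pages package. Only the `Page(...)` entries come from the hunk above; the import line and the `set_page_config` values are assumptions.

```python
import streamlit as st
from st_pages import Page, show_pages  # assumed import; st_pages provides Page/show_pages

# Illustrative subset of the navigation entries added in this commit.
st.set_page_config(page_title="Hummingbot Dashboard", page_icon="📊", layout="wide")

show_pages(
    [
        Page("main.py", "Hummingbot Dashboard", "📊"),
        Page("pages/backtest_manager/app.py", "Backtest Manager", "⚙️"),
        Page("pages/candles_downloader/app.py", "Candles Downloader", "🗂"),
        Page("pages/db_inspector/app.py", "DB Inspector", "🔍"),
    ]
)
```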
@@ -14,7 +14,7 @@ from utils.os_utils import load_directional_strategies, save_file, get_function_
import optuna

# Page metadata
title = "Backtesting"
title = "Backtest Manager"
icon = "⚙️"

st.set_page_config(
pages/candles_downloader/README.md (new file, 1 line)
@@ -0,0 +1 @@
Download historical exchange data as OHLCV candles. Supports multiple trading pairs and custom time ranges/intervals.

pages/candles_downloader/app.py
@@ -2,19 +2,31 @@ import time
from subprocess import CalledProcessError

import streamlit as st
from pathlib import Path

import constants
from utils import os_utils
from docker_manager import DockerManager

# Page metadata
title = "Candles Downloader"
icon = "🗂️"

st.set_page_config(
    page_title="Candles Downloader",
    page_icon=":bar_chart:",
    page_title=title,
    page_icon=icon,
    layout="wide",
)
st.title(f"{icon} {title}")

# About this page
current_directory = Path(__file__).parent
readme_path = current_directory / "README.md"
with st.expander("About This Page"):
    st.write(readme_path.read_text())

# Start content here
docker_manager = DockerManager()
st.write(f"# 🗂️ Candles Downloader")
st.write("---")

c1, c2, c3 = st.columns([2, 2, 0.5])
with c1:
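Every page touched by this commit repeats the same header block: page metadata, `st.set_page_config`, `st.title`, and an "About This Page" expander that renders the page's README. A hypothetical helper that factors this out could look like the sketch below; the `initialize_page` name is not part of the repo, just an illustration of the shared pattern.

```python
from pathlib import Path

import streamlit as st


def initialize_page(title: str, icon: str, page_file: str) -> None:
    """Apply the shared page boilerplate: config, title, and README expander."""
    st.set_page_config(page_title=title, page_icon=icon, layout="wide")
    st.title(f"{icon} {title}")

    # Render the page's README.md, which this commit adds next to each app.py.
    readme_path = Path(page_file).parent / "README.md"
    with st.expander("About This Page"):
        st.write(readme_path.read_text())


# Example usage from a page script, e.g. pages/candles_downloader/app.py:
# initialize_page("Candles Downloader", "🗂️", __file__)
```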
pages/db_inspector/README.md (new file, 1 line)
@@ -0,0 +1 @@
Inspect and analyze the orders and trades data contained in a Hummingbot strategy database

pages/db_inspector/app.py
@@ -1,8 +1,27 @@
import streamlit as st
from pathlib import Path

import sqlite3
import pandas as pd

# Page metadata
title = "DB Inspector"
icon = "🔍"

st.set_page_config(
    page_title=title,
    page_icon=icon,
    layout="wide",
)
st.title(f"{icon} {title}")

# About this page
current_directory = Path(__file__).parent
readme_path = current_directory / "README.md"
with st.expander("About This Page"):
    st.write(readme_path.read_text())

# Start content here
@st.cache_data
def get_table_data(database_name: str, table_name: str):
    conn = sqlite3.connect(database_name)
@@ -17,9 +36,6 @@ def get_all_tables(database_name: str):
    tables = [table_row[0] for table_row in cursor.fetchall()]
    return tables

st.set_page_config(layout='wide')
st.title("🧳 Hummingbot Database Analyzer")
st.write("---")
uploaded_file = st.file_uploader("Add your database")

if uploaded_file is not None:
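The hunk above truncates `get_table_data` right after the SQLite connection is opened. A self-contained sketch of both cached helpers, assuming the table is read straight into a DataFrame with pandas (the repo's actual query and error handling may differ):

```python
import sqlite3

import pandas as pd
import streamlit as st


@st.cache_data
def get_table_data(database_name: str, table_name: str) -> pd.DataFrame:
    # Load the whole table into a DataFrame; table_name is assumed to come
    # from get_all_tables(), not from free-form user input.
    conn = sqlite3.connect(database_name)
    try:
        return pd.read_sql_query(f"SELECT * FROM {table_name}", conn)
    finally:
        conn.close()


@st.cache_data
def get_all_tables(database_name: str) -> list:
    # List table names from the SQLite schema catalog.
    conn = sqlite3.connect(database_name)
    try:
        cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
        return [table_row[0] for table_row in cursor.fetchall()]
    finally:
        conn.close()
```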
pages/token_spreads/README.md (new file, 1 line)
@@ -0,0 +1 @@
Identify cross-exchange trading opportunities by analyzing differences in token spreads across venues

pages/token_spreads/app.py
@@ -1,10 +1,29 @@
import pandas as pd
import streamlit as st
from pathlib import Path
import plotly.express as px
import CONFIG
from utils.coingecko_utils import CoinGeckoUtils
from utils.miner_utils import MinerUtils

# Page metadata
title = "Token Spreads"
icon = "🧙"

st.set_page_config(
    page_title=title,
    page_icon=icon,
    layout="wide",
)
st.title(f"{icon} {title}")

# About this page
current_directory = Path(__file__).parent
readme_path = current_directory / "README.md"
with st.expander("About This Page"):
    st.write(readme_path.read_text())

# Start content here
cg_utils = CoinGeckoUtils()
miner_utils = MinerUtils()
@@ -24,10 +43,6 @@ def get_miner_stats_df():
def get_coin_tickers_by_id_list(coins_id: list):
    return cg_utils.get_coin_tickers_by_id_list(coins_id)

st.set_page_config(layout='wide')
st.title("🧙Cross Exchange Token Analyzer")
st.write("---")

with st.spinner(text='In progress'):
    exchanges_df = get_all_exchanges_df()
    coins_df = get_all_coins_df()
@@ -35,7 +50,6 @@ with st.spinner(text='In progress'):
    miner_coins = coins_df.loc[coins_df["symbol"].isin(miner_stats_df["base"].str.lower().unique()), "name"]

st.write("### Coins filter 🦅")
tokens = st.multiselect(
    "Select the tokens to analyze:",
    options=coins_df["name"],
@@ -47,8 +61,7 @@ coins_id = coins_df.loc[coins_df["name"].isin(tokens), "id"].tolist()
coin_tickers_df = get_coin_tickers_by_id_list(coins_id)
coin_tickers_df["coin_name"] = coin_tickers_df.apply(lambda x: coins_df.loc[coins_df["id"] == x.token_id, "name"].item(), axis=1)

st.sidebar.write("### Exchanges filter 🦅")
exchanges = st.sidebar.multiselect(
exchanges = st.multiselect(
    "Select the exchanges to analyze:",
    options=exchanges_df["name"],
    default=[exchange for exchange in CONFIG.MINER_EXCHANGES if exchange in exchanges_df["name"].unique()]
@@ -75,6 +88,6 @@ fig = px.scatter(
    }
)

st.sidebar.write("# Data filters 🏷")
st.sidebar.code("🧳 New filters coming. \nReach us on discord \nif you want to propose one!")
# st.write("# Data filters 🏷")
# st.code("🧳 New filters coming. \nReach us on discord \nif you want to propose one!")
st.plotly_chart(fig, use_container_width=True)
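The filter-then-plot flow on this page (multiselects feeding a Plotly scatter) can be illustrated with a tiny stand-alone Streamlit sketch. The DataFrame columns below are hypothetical and only mimic the shape of the CoinGecko ticker data the page actually uses.

```python
import pandas as pd
import plotly.express as px
import streamlit as st

# Hypothetical ticker rows standing in for the CoinGecko/miner data on this page.
tickers = pd.DataFrame({
    "coin_name": ["BTC", "BTC", "ETH", "ETH"],
    "exchange": ["binance", "kucoin", "binance", "kucoin"],
    "bid_ask_spread_percentage": [0.01, 0.08, 0.02, 0.12],
    "volume": [1_000_000, 250_000, 800_000, 150_000],
})

exchanges = st.multiselect(
    "Select the exchanges to analyze:",
    options=tickers["exchange"].unique(),
    default=list(tickers["exchange"].unique()),
)

# Keep only the selected venues and plot spread per token, sized by volume.
filtered = tickers[tickers["exchange"].isin(exchanges)]
fig = px.scatter(filtered, x="coin_name", y="bid_ask_spread_percentage",
                 size="volume", color="exchange")
st.plotly_chart(fig, use_container_width=True)
```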
pages/tvl_vs_mcap/README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
Easily compare various DeFi protocols based on their market capitalization and total value locked, using DeFiLlama data.

Data Source: [DefiLlama](https://defillama.com/)

pages/tvl_vs_mcap/app.py
@@ -1,9 +1,28 @@
import numpy as np
import streamlit as st
from pathlib import Path
import pandas as pd
import plotly.express as px
from defillama import DefiLlama

# Page metadata
title = "TVL vs Market Cap"
icon = "🦉"

st.set_page_config(
    page_title=title,
    page_icon=icon,
    layout="wide",
)
st.title(f"{icon} {title}")

# About this page
current_directory = Path(__file__).parent
readme_path = current_directory / "README.md"
with st.expander("About This Page"):
    st.write(readme_path.read_text())

# Start content here
MIN_TVL = 1000000.
MIN_MCAP = 1000000.
@@ -17,18 +36,13 @@ def get_tvl_mcap_data():
def get_protocols_by_chain_category(protocols: pd.DataFrame, group_by: list, nth: list):
    return protocols.sort_values('tvl', ascending=False).groupby(group_by).nth(nth).reset_index()

st.set_page_config(layout='wide')
st.title("🦉 TVL vs MCAP Analysis")
st.write("---")
st.code("💡 Source: [DefiLlama](https://defillama.com/)")

with st.spinner(text='In progress'):
    tvl_mcap_df = get_tvl_mcap_data()

default_chains = ["Ethereum", "Solana", "Binance", "Polygon", "Multi-Chain", "Avalanche"]

st.sidebar.write("### Chains filter 🔗")
chains = st.sidebar.multiselect(
st.write("### Chains 🔗")
chains = st.multiselect(
    "Select the chains to analyze:",
    options=tvl_mcap_df["chain"].unique(),
    default=default_chains)
@@ -52,10 +66,10 @@ scatter = px.scatter(
st.plotly_chart(scatter, use_container_width=True)

st.sidebar.write("---")
st.sidebar.write("### SunBurst filter 🔗")
groupby = st.sidebar.selectbox('Group by:', [['chain', 'category'], ['category', 'chain']])
nth = st.sidebar.slider('Top protocols by Category', min_value=1, max_value=5)
st.write("---")
st.write("### SunBurst 🌞")
groupby = st.selectbox('Group by:', [['chain', 'category'], ['category', 'chain']])
nth = st.slider('Top protocols by Category', min_value=1, max_value=5)

proto_agg = get_protocols_by_chain_category(tvl_mcap_df[tvl_mcap_df["chain"].isin(chains)], groupby, np.arange(0, nth, 1).tolist())
groupby.append("slug")
@@ -68,6 +82,3 @@ sunburst = px.sunburst(
    template="plotly_dark",)

st.plotly_chart(sunburst, use_container_width=True)

st.sidebar.write("# Data filters 🏷")
st.sidebar.code("🧳 New filters coming. \nReach us on discord \nif you want to propose one!")
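To make the sunburst step concrete: the page groups protocols by the selected ['chain', 'category'] order, keeps the top-N per group, then appends 'slug' so each protocol becomes a leaf ring. Below is a stand-alone sketch with made-up TVL numbers; only the grouping/plotting pattern is taken from the hunks above.

```python
import pandas as pd
import plotly.express as px

# Made-up protocol rows; the real page derives these from DefiLlama data.
protocols = pd.DataFrame({
    "chain": ["Ethereum", "Ethereum", "Solana", "Solana"],
    "category": ["Dexes", "Lending", "Dexes", "Lending"],
    "slug": ["uniswap", "aave", "raydium", "solend"],
    "tvl": [4.1e9, 5.3e9, 6.0e8, 2.5e8],
})

groupby = ["chain", "category"]  # or ["category", "chain"], as in the selectbox
sunburst = px.sunburst(
    protocols,
    path=groupby + ["slug"],     # chain -> category -> protocol rings
    values="tvl",
    template="plotly_dark",
)
sunburst.show()
```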