Spaces:
Running
Running
File size: 9,616 Bytes
e4a3f8e 2a40784 9e04788 2a40784 9e04788 2a40784 9e04788 2a40784 9e04788 2a40784 e4a3f8e 2a40784 9e04788 2a40784 9e04788 2a40784 1d75cea 0b71f39 01022c3 0b71f39 43fa362 0b71f39 fae30dd 7ac8300 9e04788 393c328 3f2b8e8 1d75cea 393c328 e6e9444 0b71f39 01022c3 0b71f39 15963b4 0b71f39 1d75cea 7ac8300 0b71f39 393c328 3f2b8e8 1d75cea 3f2b8e8 e6e9444 15963b4 e6e9444 fae30dd 7ac8300 e6e9444 b12ea5f e6e9444 b12ea5f e6e9444 15963b4 af08225 fae30dd 7ac8300 e6e9444 ffe16ea b12ea5f 68ad267 b12ea5f e6e9444 b12ea5f e6e9444 2a40784 e4a3f8e 07ba1e6 e4a3f8e 2a40784 e6e9444 0b71f39 e6e9444 1d75cea e6e9444 f0d40a8 e6e9444 e4a3f8e 2a40784 e4a3f8e 9e04788 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 |
import json
from datetime import datetime, timezone

import gradio as gr
import pandas as pd
import plotly.express as px
import requests
from web3 import Web3
OPTIMISM_RPC_URL = 'https://opt-mainnet.g.alchemy.com/v2/U5gnXPYxeyH43MJ9tP8ONBQHEDRav7H0'
# Initialize a Web3 instance
web3 = Web3(Web3.HTTPProvider(OPTIMISM_RPC_URL))
# Check if connection is successful
if not web3.is_connected():
raise Exception("Failed to connect to the Optimism network.")
# Contract address
contract_address = '0x3d77596beb0f130a4415df3D2D8232B3d3D31e44'
# Load the ABI from the provided JSON file
with open('service_registry_abi.json', 'r') as abi_file:
contract_abi = json.load(abi_file)
# Now you can create the contract
service_registry = web3.eth.contract(address=contract_address, abi=contract_abi)
def get_transfers(integrator: str, wallet: str) -> str:
url = f"https://li.quest/v1/analytics/transfers?integrator={integrator}&wallet={wallet}"
headers = {"accept": "application/json"}
response = requests.get(url, headers=headers)
return response.json()
def fetch_and_aggregate_transactions():
total_services = service_registry.functions.totalSupply().call()
aggregated_transactions = []
for service_id in range(1, total_services + 1):
service = service_registry.functions.getService(service_id).call()
# Extract the list of agent IDs from the service data
agent_ids = service[-1] # Assuming the last element is the list of agent IDs
# Check if 25 is in the list of agent IDs
if 25 in agent_ids:
agent_address = service_registry.functions.getAgentInstances(service_id).call()[1][0]
response_transfers = get_transfers("valory", agent_address)
aggregated_transactions.extend(response_transfers["transfers"])
return aggregated_transactions
# Function to parse the transaction data and prepare it for visualization
def process_transactions(data):
transactions = data
# Convert the data into a pandas DataFrame for easy manipulation
rows = []
for tx in transactions:
# Normalize amounts
sending_amount = float(tx["sending"]["amount"]) / (10 ** tx["sending"]["token"]["decimals"])
receiving_amount = float(tx["receiving"]["amount"]) / (10 ** tx["receiving"]["token"]["decimals"])
# Convert timestamps to datetime objects
sending_timestamp = datetime.utcfromtimestamp(tx["sending"]["timestamp"])
receiving_timestamp = datetime.utcfromtimestamp(tx["receiving"]["timestamp"])
# Prepare row data
rows.append({
"transactionId": tx["transactionId"],
"from_address": tx["fromAddress"],
"to_address": tx["toAddress"],
"sending_chain": tx["sending"]["chainId"],
"receiving_chain": tx["receiving"]["chainId"],
"sending_token_symbol": tx["sending"]["token"]["symbol"],
"receiving_token_symbol": tx["receiving"]["token"]["symbol"],
"sending_amount": sending_amount,
"receiving_amount": receiving_amount,
"sending_amount_usd": float(tx["sending"]["amountUSD"]),
"receiving_amount_usd": float(tx["receiving"]["amountUSD"]),
"sending_gas_used": int(tx["sending"]["gasUsed"]),
"receiving_gas_used": int(tx["receiving"]["gasUsed"]),
"sending_timestamp": sending_timestamp,
"receiving_timestamp": receiving_timestamp,
"date": sending_timestamp.date(), # Group by day
"week": sending_timestamp.strftime('%Y-%W') # Group by week
})
df = pd.DataFrame(rows)
return df
# Function to create visualizations based on the metrics
def create_visualizations():
transactions_data = fetch_and_aggregate_transactions()
df = process_transactions(transactions_data)
# Map chain IDs to chain names
chain_name_map = {
10: "Optimism",
8453: "Base",
1: "Ethereum"
}
df["sending_chain"] = df["sending_chain"].map(chain_name_map)
df["receiving_chain"] = df["receiving_chain"].map(chain_name_map)
# Ensure that chain IDs are strings for consistent grouping
df["sending_chain"] = df["sending_chain"].astype(str)
df["receiving_chain"] = df["receiving_chain"].astype(str)
df['date'] = pd.to_datetime(df['date'])
# Total transactions per chain per day
tx_per_chain = df.groupby(["date", "sending_chain"]).size().reset_index(name="transaction_count")
fig_tx_chain = px.bar(
tx_per_chain,
x="date",
y="transaction_count",
color="sending_chain",
title="Chain Daily Activity: Transactions",
labels={"sending_chain": "Transaction Chain","transaction_count": "Daily Transaction Nr"},
barmode="stack",
color_discrete_sequence=["purple", "darkgreen"]
)
fig_tx_chain.update_layout(
xaxis_title=None,
yaxis=dict(tickmode='linear', tick0=0, dtick=1),
xaxis=dict(
tickmode='array',
tickvals=tx_per_chain['date'],
ticktext=tx_per_chain['date'].dt.strftime('%Y-%m-%d'),
tickangle=0,
),
bargap=0.8,
height=700,
)
fig_tx_chain.update_xaxes(tickformat="%Y-%m-%d")
# Identify swap transactions
df["is_swap"] = df.apply(lambda x: x["sending_token_symbol"] != x["receiving_token_symbol"], axis=1)
# Total swaps per chain per day
swaps_per_chain = df[df["is_swap"]].groupby(["date", "sending_chain"]).size().reset_index(name="swap_count")
fig_swaps_chain = px.bar(
swaps_per_chain,
x="date",
y="swap_count",
color="sending_chain",
title="Chain Daily Activity: Swaps",
labels={"sending_chain": "Transaction Chain", "swap_count": "Daily Swap Nr"},
barmode="stack",
color_discrete_sequence=["purple", "darkgreen"]
)
fig_swaps_chain.update_layout(
xaxis_title=None,
yaxis=dict(tickmode='linear', tick0=0, dtick=1),
xaxis=dict(
tickmode='array',
tickvals=swaps_per_chain['date'],
ticktext=swaps_per_chain['date'].dt.strftime('%Y-%m-%d'),
tickangle=0,
),
bargap=0.8,
height=700,
)
fig_swaps_chain.update_xaxes(tickformat="%Y-%m-%d")
# Identify bridge transactions
df["is_bridge"] = df.apply(lambda x: x["sending_chain"] != x["receiving_chain"], axis=1)
# Total bridges per chain per day
bridges_per_chain = df[df["is_bridge"]].groupby(["date", "sending_chain"]).size().reset_index(name="bridge_count")
fig_bridges_chain = px.bar(
bridges_per_chain,
x="date",
y="bridge_count",
color="sending_chain",
title="Chain Daily Activity: Bridges",
labels={"sending_chain": "Transaction Chain","bridge_count": "Daily Bridge Nr"},
barmode="stack",
color_discrete_sequence=["purple", "darkgreen"]
)
fig_bridges_chain.update_layout(
xaxis_title=None,
yaxis=dict(tickmode='linear', tick0=0, dtick=1),
xaxis=dict(
tickmode='array',
tickvals=bridges_per_chain['date'],
ticktext=bridges_per_chain['date'].dt.strftime('%Y-%m-%d'),
tickangle=0,
),
bargap=0.8,
height=700,
)
fig_bridges_chain.update_xaxes(tickformat="%Y-%m-%d")
# Investment per agent per day
investment_per_agent = df.groupby(["date", "from_address", "sending_chain"])["sending_amount_usd"].sum().reset_index()
fig_investment_agent = px.bar(
investment_per_agent,
x="date",
y="sending_amount_usd",
color="sending_chain",
title="Amount of Investment (USD) per Day",
labels={"sending_chain": "Transaction Chain","sending_amount_usd": "Investment Amount (USD)"},
barmode="stack",
color_discrete_sequence=["purple", "darkgreen"]
)
fig_investment_agent.update_layout(
xaxis_title=None,
yaxis=dict(
title="Investment Amount (USD)",
tickmode='auto',
nticks=10,
tickformat='.2f' # Show 2 decimal places
),
xaxis=dict(
tickmode='array',
tickvals=investment_per_agent['date'],
ticktext=investment_per_agent['date'].dt.strftime('%Y-%m-%d'),
tickangle=0,
),
bargap=0.8,
height=700,
)
fig_investment_agent.update_xaxes(tickformat="%Y-%m-%d")
return fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent
# Gradio interface
def dashboard():
with gr.Blocks() as demo:
gr.Markdown("# Valory Transactions Dashboard")
# Fetch and display visualizations
with gr.Tab("Transactions"):
fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent = create_visualizations()
gr.Plot(fig_tx_chain)
with gr.Tab("Swaps"):
fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent = create_visualizations()
gr.Plot(fig_swaps_chain)
with gr.Tab("Bridges"):
fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent = create_visualizations()
gr.Plot(fig_bridges_chain)
with gr.Tab("Investment"):
fig_tx_chain, fig_swaps_chain, fig_bridges_chain, fig_investment_agent = create_visualizations()
gr.Plot(fig_investment_agent)
return demo
# Launch the dashboard
if __name__ == "__main__":
dashboard().launch() |