!pip install pycoingecko
!pip install dune-client
!pip install seaborn
import pandas as pd
import numpy as np
from dune_client.client import DuneClient
from pycoingecko import CoinGeckoAPI
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.ensemble import IsolationForest
import os
from datetime import datetime, timedelta
from tabulate import tabulate
%matplotlib inline
#plt.style.use('seaborn')
def get_vertex_price_data(days=90):
    """Download VRTX price history from CoinGecko.

    Args:
        days: Number of days of history to request (default 90).

    Returns:
        DataFrame indexed by timestamp with a single 'price' column (USD).
    """
    client = CoinGeckoAPI()
    raw = client.get_coin_market_chart_by_id(id='vertex-protocol', vs_currency='usd', days=days)
    prices = pd.DataFrame(raw['prices'], columns=['timestamp', 'price'])
    # CoinGecko returns epoch milliseconds; convert to datetimes for the index.
    prices['timestamp'] = pd.to_datetime(prices['timestamp'], unit='ms')
    return prices.set_index('timestamp')
# Fetch one year of Vertex Protocol price history.
vertex_price_data = get_vertex_price_data(days=365)

# Flag price anomalies with an Isolation Forest (-1 = anomaly, 1 = normal).
# contamination=0.05 assumes roughly 5% of observations are outliers;
# random_state pins the result for reproducibility.
vertex_price_data['anomaly'] = IsolationForest(
    contamination=0.05, random_state=42
).fit_predict(vertex_price_data)
def fetch_dune_data(query_id):
    """Fetch the latest cached result of a Dune Analytics query.

    Args:
        query_id: Numeric ID of the saved Dune query.

    Returns:
        pandas.DataFrame built from the query's result rows.

    Raises:
        RuntimeError: if the DUNE_API_KEY environment variable is not set.
    """
    # SECURITY: the API key used to be hardcoded here, leaking the credential
    # into source control. Read it from the environment instead.
    api_key = os.environ.get("DUNE_API_KEY")
    if not api_key:
        raise RuntimeError(
            "Set the DUNE_API_KEY environment variable before calling fetch_dune_data()."
        )
    dune = DuneClient(
        api_key=api_key,
        base_url="https://api.dune.com",
        request_timeout=300,
    )
    query_result = dune.get_latest_result(query_id)
    return pd.DataFrame(query_result.result.rows)
# Run to view results  (notebook cell marker; commented out so the file parses)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from datetime import datetime, timedelta
import os
def staking_analysis(stakers_df, amount_column='staked_amount'):
    """Summarize positive stakes in a staker table.

    Generalized with *amount_column* (default keeps backward compatibility)
    so it matches the later re-definition of this function in the file.

    Args:
        stakers_df: DataFrame with one row per staker.
        amount_column: Name of the column holding staked amounts.

    Returns:
        dict with count, sum, mean, median, max and min of stakes > 0.

    NOTE: coerces stakers_df[amount_column] to numeric IN PLACE; downstream
    code filters the caller's DataFrame and relies on this side effect.
    """
    stakers_df[amount_column] = pd.to_numeric(stakers_df[amount_column], errors='coerce')
    cleaned_stakers = stakers_df[stakers_df[amount_column] > 0]
    amounts = cleaned_stakers[amount_column]
    return {
        "total_stakers": len(cleaned_stakers),
        "total_staked": amounts.sum(),
        "average_stake": amounts.mean(),
        "median_stake": amounts.median(),
        "max_stake": amounts.max(),
        "min_stake": amounts.min(),
    }
def fetch_shorting_historical_cached(query_id, cache_file='shorting_historical_cache.pkl'):
    """Fetch shorting history from Dune, reusing a local pickle cache.

    The cache lives in the current working directory and is considered fresh
    for 24 hours; otherwise the query is re-run and the cache rewritten.
    """
    cache_path = os.path.join(os.getcwd(), cache_file)
    if os.path.exists(cache_path):
        cache_age = datetime.now() - datetime.fromtimestamp(os.path.getmtime(cache_path))
        if cache_age < timedelta(hours=24):
            print("Loading cached shorting historical data...")
            return pd.read_pickle(cache_path)
    print("Fetching new shorting historical data...")
    fresh = fetch_dune_data(query_id)
    fresh.to_pickle(cache_path)
    return fresh
# Fetch data
# Each numeric ID is a saved Dune Analytics query (URLs are printed with the
# corresponding charts further down).
vertex_stakers = fetch_dune_data(3931067)
fees_profit = fetch_dune_data(3934282)
liquidations_profit = fetch_dune_data(3934207)
open_interest = fetch_dune_data(3934109)
# Shorting history is cached on disk for 24h to avoid refetching.
shorting_historical = fetch_shorting_historical_cached(4056110)
# NOTE(review): staking_analysis coerces 'staked_amount' to numeric in place,
# so the filter below operates on the converted column — keep this call order.
staking_metrics = staking_analysis(vertex_stakers)
cleaned_stakers = vertex_stakers[vertex_stakers['staked_amount'] > 0]
# Plot the VRTX price series and mark Isolation Forest anomalies in red.
fig, ax = plt.subplots(figsize=(12, 6))
ax.plot(vertex_price_data.index, vertex_price_data['price'])
ax.set_title("Vertex Protocol Price Over Time")
ax.set_xlabel("Date")
ax.set_ylabel("Price (USD)")
ax.tick_params(axis='x', rotation=45)
# One dashed vertical line per anomalous timestamp (anomaly == -1).
for anomaly_date in vertex_price_data.index[vertex_price_data['anomaly'] == -1]:
    ax.axvline(x=anomaly_date, color='red', linestyle='--', alpha=0.5)
plt.tight_layout()
plt.show()
# Vertex Protocol Top 20 stakers visualization
total_staked_vertex = cleaned_stakers['staked_amount'].sum()
top_20_stakers = cleaned_stakers.nlargest(20, 'staked_amount').reset_index(drop=True)
top_20_stakers.index = range(1, 21)  # rank 1..20
top_20_stakers['percentage'] = top_20_stakers['staked_amount'] / total_staked_vertex * 100

plt.figure(figsize=(12, 10))
plt.barh(top_20_stakers.index, top_20_stakers['percentage'])
plt.gca().invert_yaxis()  # rank 1 at the top
plt.title('Top 20 Vertex Protocol Stakers by Percentage of Total Staked')
plt.xlabel('Percentage of Total Staked')
plt.ylabel('Staker Rank')
plt.yticks(range(1, 21))
plt.xlim(0, top_20_stakers['percentage'].max() * 1.1)  # right-hand padding
# Label each bar with its percentage of the total.
for rank, pct in zip(top_20_stakers.index, top_20_stakers['percentage']):
    plt.text(pct, rank, f'{pct:.2f}%', va='center')
plt.tight_layout()
plt.show()
# Staking Metrics
print("\nStaking Metrics:")
print(f"Total Stakers: {staking_metrics['total_stakers']}")
print(f"Total Staked: {staking_metrics['total_staked']:.2f}")

# Concentration report: share of the total held by the top p% of wallets.
total_staked = cleaned_stakers['staked_amount'].sum()
print("\nStaker Distribution Analysis:")
for p in [1, 5, 10, 50, 90, 95, 99]:
    cutoff = np.percentile(cleaned_stakers['staked_amount'], 100 - p)
    held = cleaned_stakers.loc[cleaned_stakers['staked_amount'] >= cutoff, 'staked_amount'].sum()
    print(f"Top {p}% of stakers hold {held / total_staked * 100:.2f}% of total staked amount")
def staking_analysis(stakers_df, amount_column='staked_amount'):
    """Summarize positive stakes in *stakers_df*.

    Args:
        stakers_df: DataFrame with one row per staker.
        amount_column: Column holding the staked amount.

    Returns:
        dict with count, sum, mean, median, max and min of stakes > 0.

    NOTE: coerces stakers_df[amount_column] to numeric IN PLACE (side effect
    on the caller's DataFrame). This re-definition shadows the earlier one.
    """
    stakers_df[amount_column] = pd.to_numeric(stakers_df[amount_column], errors='coerce')
    positive = stakers_df.loc[stakers_df[amount_column] > 0, amount_column]
    return {
        "total_stakers": positive.size,
        "total_staked": positive.sum(),
        "average_stake": positive.mean(),
        "median_stake": positive.median(),
        "max_stake": positive.max(),
        "min_stake": positive.min(),
    }
# Fetch Ethereum stakers data
ethereum_stakers = fetch_dune_data(4085083)
# Coerce to numeric up front; keep only wallets with a positive net stake.
ethereum_stakers['net_staked'] = pd.to_numeric(ethereum_stakers['net_staked'], errors='coerce')
cleaned_eth_stakers = ethereum_stakers[ethereum_stakers['net_staked'] > 0]
# Ethereum staking analysis
# (staking_analysis re-coerces the column; harmless on already-numeric data)
eth_staking_metrics = staking_analysis(cleaned_eth_stakers, 'net_staked')
# Ethereum Top 20 stakers visualization
total_staked_eth = cleaned_eth_stakers['net_staked'].sum()
top_20_eth_stakers = cleaned_eth_stakers.nlargest(20, 'net_staked').reset_index(drop=True)
top_20_eth_stakers.index = range(1, 21)  # rank 1..20
top_20_eth_stakers['percentage'] = top_20_eth_stakers['net_staked'] / total_staked_eth * 100

plt.figure(figsize=(12, 10))
plt.barh(top_20_eth_stakers.index, top_20_eth_stakers['percentage'])
plt.gca().invert_yaxis()  # rank 1 at the top
plt.title('Top 20 Ethereum Stakers by Percentage of Total Staked')
plt.xlabel('Percentage of Total Staked')
plt.ylabel('Staker Rank')
plt.yticks(range(1, 21))
plt.xlim(0, top_20_eth_stakers['percentage'].max() * 1.1)  # right-hand padding
# Label each bar with its percentage of the total.
for rank, pct in zip(top_20_eth_stakers.index, top_20_eth_stakers['percentage']):
    plt.text(pct, rank, f'{pct:.2f}%', va='center')
plt.tight_layout()
plt.show()
# Ethereum Staking Metrics
print("\nEthereum Staking Metrics:")
print(f"Total Stakers: {eth_staking_metrics['total_stakers']}")
print(f"Total Staked: {eth_staking_metrics['total_staked']:.2f}")

# Ethereum Staker Distribution Analysis
# Concentration report: share of the total held by the top p% of wallets.
eth_total_staked = cleaned_eth_stakers['net_staked'].sum()
print("\nEthereum Staker Distribution Analysis:")
for p in [1, 5, 10, 50, 90, 95, 99]:
    cutoff = np.percentile(cleaned_eth_stakers['net_staked'], 100 - p)
    held = cleaned_eth_stakers.loc[cleaned_eth_stakers['net_staked'] >= cutoff, 'net_staked'].sum()
    print(f"Top {p}% of stakers hold {held / eth_total_staked * 100:.2f}% of total staked amount")
# Profit from Fees — bar chart of fee-revenue components from the Dune query.
fee_labels = ['Total Taker Fees', 'Total Maker Rebates', 'Total Net Fees', 'Max Treasury Allocation']
fee_values = [
    fees_profit['total_taker_fees'].iloc[0],
    fees_profit['total_maker_rebates'].iloc[0],
    fees_profit['total_net_fees'].iloc[0],
    fees_profit['max_treasury_allocation'].iloc[0],
]
fig, ax = plt.subplots(figsize=(10, 6))
# Rebates drawn in red, net fees and treasury allocation in green
# (same final colors as before; previously applied via set_color after the fact).
bars = ax.bar(fee_labels, fee_values, color=['#1f77b4', '#d62728', '#2ca02c', '#2ca02c'])
ax.set_title('Profit from Fees')
ax.set_ylabel('USD')
ax.tick_params(axis='x', rotation=45)
# Annotate each bar with its dollar value.
for bar in bars:
    height = bar.get_height()
    ax.text(bar.get_x() + bar.get_width() / 2, height, f'${height:,.0f}', ha='center', va='bottom', fontsize=10)
fig.tight_layout()
plt.show()
print(f"Data up to: {fees_profit['data_up_to'].iloc[0]}")
print("\nNote: Please refer to this query (https://dune.com/queries/3934282) for logic behind open_interest")
# Open interest by market
plt.figure(figsize=(12, 6))
# Pass the market column as a categorical vector so seaborn treats it discretely.
market_categories = open_interest['market'].astype('category')
sns.barplot(data=open_interest, x=market_categories, y='open_interest_usd')
plt.title('Open Interest by Market')
plt.xlabel('Market')
plt.ylabel('Open Interest (USD)')
plt.xticks(rotation=90)
plt.tight_layout()
plt.show()
print("\nNote: Please refer to this query (https://dune.com/queries/3934109) for logic behind open_interest")
# Liquidations over time
liquidations_profit = liquidations_profit.sort_values('date')  # chronological x-axis
plt.figure(figsize=(12, 6))
plt.plot(liquidations_profit['date'], liquidations_profit['total_liquidated'])
plt.title('Total Liquidated Over Time')
plt.xlabel('Date')
plt.ylabel('Total Liquidated')
plt.xticks(rotation=45, ha='right')
# Cap the x-axis at 10 ticks so the rotated date labels stay readable.
plt.gca().xaxis.set_major_locator(plt.MaxNLocator(10))
plt.tight_layout()
plt.show()
print("\nNote: Please refer to this query (https://dune.com/queries/3934207) for logic behind liquidations_profit")
# Run to view results  (notebook cell marker; commented out so the file parses)
import requests
import pandas as pd
import matplotlib.pyplot as plt
from datetime import datetime
import dateutil.parser
from collections import defaultdict
# Module-level total of estimated VRTX liquidity across all exchanges;
# written by calculate_total_liquidity() and read later by run_analysis().
global_total_vrtx_liquidity = 0
def get_cex_data():
    """Estimate VRTX liquidity on centralized exchanges via CoinGecko tickers.

    Liquidity is a crude proxy: 24h converted USD volume divided by 24.
    The 'camelot_v3' ticker is skipped because it is a DEX.

    Returns:
        list of exchange dicts, or [] if the HTTP request fails.
    """
    coin_id = "vertex-protocol"
    url = f"https://api.coingecko.com/api/v3/coins/{coin_id}/tickers"
    try:
        response = requests.get(url)
        response.raise_for_status()
        payload = response.json()
        cex_data = []
        for ticker in payload['tickers']:
            try:
                if ticker['market']['identifier'] == 'camelot_v3':  # Exclude DEX
                    continue
                exchange = {
                    'Exchange': ticker['market']['name'],
                    'Type': 'CEX',
                    'Pair': ticker['base'] + '/' + ticker['target'],
                    'Volume_USD': float(ticker['converted_volume']['usd']),
                }
                # Assume hourly volume approximates available liquidity.
                exchange['Estimated_Liquidity'] = exchange['Volume_USD'] / 24
                cex_data.append(exchange)
            except (KeyError, ValueError, TypeError) as e:
                # Skip malformed tickers but keep processing the rest.
                print(f"Error processing CEX ticker: {e}")
        return cex_data
    except requests.RequestException as e:
        print(f"An error occurred while fetching CEX data: {e}")
        return []
def get_dex_data():
    """Collect VRTX pool liquidity from GeckoTerminal (Ethereum + Arbitrum).

    Uses each pool's USD reserve as 'Estimated_Liquidity'. Networks whose
    request fails are reported and skipped.

    Returns:
        list of pool dicts (possibly empty).
    """
    token_by_network = {
        'ethereum': '0xbbEE07B3e8121227AfCFe1E2B82772246226128e',  # VRTX on Ethereum
        'arbitrum': '0x95146881b86B3ee99e63705eC87AfE29Fcc044D9',  # VRTX on Arbitrum
    }
    base_url = "https://api.geckoterminal.com/api/v2/networks/{}/tokens/{}/pools"
    headers = {"Accept": "application/json"}
    dex_data = []
    for network, token_address in token_by_network.items():
        url = base_url.format(network, token_address)
        try:
            response = requests.get(url, headers=headers)
            response.raise_for_status()
            payload = response.json()
            if 'data' not in payload:
                print(f"No data found for network: {network}")
                continue
            for pool in payload['data']:
                dex_name = pool['relationships']['dex']['data']['id']
                attrs = pool['attributes']
                dex_data.append({
                    'Exchange': f"{dex_name.replace('_', ' ').title()} ({network.capitalize()})",
                    'Type': 'DEX',
                    'Pair': attrs.get('name', 'Unknown'),
                    'Estimated_Liquidity': float(attrs.get('reserve_in_usd', 0)),
                })
        except requests.exceptions.RequestException as e:
            print(f"Failed to fetch DEX data for network: {network}. Error: {str(e)}")
    return dex_data
def calculate_total_liquidity():
    """Aggregate CEX + DEX liquidity estimates.

    Side effect: stores the grand total in the module-level
    global_total_vrtx_liquidity for later use by run_analysis().

    Returns:
        (total_liquidity, exchanges sorted by liquidity, descending)
    """
    global global_total_vrtx_liquidity
    all_exchanges = get_cex_data() + get_dex_data()
    total_liquidity = sum(entry['Estimated_Liquidity'] for entry in all_exchanges)
    ranked = sorted(all_exchanges, key=lambda entry: entry['Estimated_Liquidity'], reverse=True)
    global_total_vrtx_liquidity = total_liquidity
    return total_liquidity, ranked
def print_liquidity_summary(total_liquidity, sorted_exchanges):
    """Print the grand total and a fixed-width table of the top 10 exchanges."""
    print(f"Total VRTX Liquidity Across All Exchanges: ${total_liquidity:,.2f}")
    print("\nTop 10 Exchanges by Liquidity:")
    divider = "-" * 80
    print(divider)
    print(f"{'Exchange':<40} {'Type':<5} {'Estimated Liquidity':>20}")
    print(divider)
    for entry in sorted_exchanges[:10]:
        print(f"{entry['Exchange']:<40} {entry['Type']:<5} ${entry['Estimated_Liquidity']:>19,.2f}")
def create_bar_chart(sorted_exchanges):
    """Bar-chart the ten largest exchanges by estimated VRTX liquidity."""
    top_10 = sorted_exchanges[:10]
    names = [entry['Exchange'] for entry in top_10]
    values = [entry['Estimated_Liquidity'] for entry in top_10]

    plt.figure(figsize=(12, 6))
    bars = plt.bar(names, values)
    plt.title('Top 10 Exchanges by VRTX Liquidity')
    plt.xlabel('Exchange')
    plt.ylabel('Liquidity (USD)')
    plt.xticks(rotation=45, ha='right')
    plt.tight_layout()
    # Annotate each bar with its dollar value.
    for bar in bars:
        height = bar.get_height()
        plt.text(bar.get_x() + bar.get_width() / 2., height,
                 f'${height:,.0f}',
                 ha='center', va='bottom', rotation=0)
    plt.show()
if __name__ == "__main__":
    # Aggregate liquidity across CEXs and DEXs, print a summary table,
    # and chart the top-10 exchanges.
    total_liquidity, sorted_exchanges = calculate_total_liquidity()
    print_liquidity_summary(total_liquidity, sorted_exchanges)
    create_bar_chart(sorted_exchanges)
    # Print the global variable for verification
    print(f"\nGlobal Total VRTX Liquidity: ${global_total_vrtx_liquidity:,.2f}")
# Run to view results  (notebook cell marker; commented out so the file parses)
import pandas as pd
import numpy as np
from pycoingecko import CoinGeckoAPI
def analyze_liquidations(liquidations_df):
    """Extract the first 'max_daily_liquidation' value, or None if absent.

    Args:
        liquidations_df: DataFrame from the Dune liquidations query.

    Returns:
        dict with the single key 'max_daily_liquidation'.
    """
    if 'max_daily_liquidation' in liquidations_df.columns:
        value = liquidations_df['max_daily_liquidation'].iloc[0]
    else:
        value = None
    return {"max_daily_liquidation": value}
def calculate_cost_of_corruption(staking_metrics):
    """Estimate the USD cost to corrupt governance.

    Defined as 67% of the USD value of all staked VRTX (staked amount times
    the current spot price fetched from CoinGecko).
    """
    governance_threshold = 0.67  # 67% of total value staked
    total_value_staked = staking_metrics['total_staked'] * get_vertex_price()
    return {
        "total_value_staked": total_value_staked,
        "cost_of_corruption": total_value_staked * governance_threshold,
        "governance_threshold": governance_threshold,
    }
def calculate_profit_from_corruption(fees, liquidations, total_vrtx_liquidity):
    """Sum the three corruption-profit vectors.

    Args:
        fees: DataFrame with a 'max_treasury_allocation' column (row 0 used).
        liquidations: dict with key 'max_daily_liquidation'.
        total_vrtx_liquidity: total VRTX liquidity (proxy for shorting profit).

    Returns:
        dict with each component plus their sum under 'total_profit'.
    """
    # Hoist the repeated iloc[0] lookup so the value is read once.
    fees_profit = fees['max_treasury_allocation'].iloc[0]
    liquidations_profit = liquidations['max_daily_liquidation']
    return {
        "fees_profit": fees_profit,
        "liquidations_profit": liquidations_profit,
        "shorting_profit": total_vrtx_liquidity,
        "total_profit": fees_profit + liquidations_profit + total_vrtx_liquidity,
    }
def get_vertex_price():
    """Return the current VRTX spot price in USD from CoinGecko."""
    coin = CoinGeckoAPI().get_coin_by_id(id='vertex-protocol')
    return coin['market_data']['current_price']['usd']
def assess_governance_risk():
    """Return the static qualitative governance-risk assessment.

    Hardcoded findings: the sequencer is centralized, there is no DAO
    governance, and the overall risk level is rated Medium.
    """
    return dict(
        centralized_sequencer=True,
        dao_governance=False,
        risk_level="Medium",
    )
def calculate_safety_factor(coc, pfc):
    """Safety factor SF = (CoC - PfC) / CoC; positive means corruption is unprofitable."""
    cost = coc['cost_of_corruption']
    return (cost - pfc['total_profit']) / cost
def run_analysis(liquidations_profit, fees_profit, staking_metrics):
    """Print the full economic-security report: CoC, PfC, and safety factor.

    Args:
        liquidations_profit: DataFrame from the Dune liquidations query.
        fees_profit: DataFrame from the Dune fees query.
        staking_metrics: dict produced by staking_analysis().
    """
    print("\nVertex Protocol Economic Security Analysis")
    # Shorting-profit proxy: the module-level liquidity total computed by
    # calculate_total_liquidity() earlier in the file.
    total_vrtx_liquidity = global_total_vrtx_liquidity
    print("\n1. Cost of Corruption:")
    coc = calculate_cost_of_corruption(staking_metrics)
    print(f" Total Value Staked: ${coc['total_value_staked']:,.2f}")
    print(f" Governance Threshold: {coc['governance_threshold']:.2%}")
    print(f" Cost of Corruption: ${coc['cost_of_corruption']:,.2f}")
    print("\n2. Profit from Corruption Vectors:")
    liquidations_metrics = analyze_liquidations(liquidations_profit)
    pfc = calculate_profit_from_corruption(fees_profit, liquidations_metrics, total_vrtx_liquidity)
    print(" a. Fees Profit:")
    print(f" ${pfc['fees_profit']:,.2f}")
    print(" b. Liquidations Profit:")
    print(f" ${pfc['liquidations_profit']:,.2f}")
    print(" c. Profit from Shorting $VRTX Token:")
    print(f" ${pfc['shorting_profit']:,.2f}")
    print(f"\n Total Profit: ${pfc['total_profit']:,.2f}")
    print("\n3. Safety Factor Analysis:")
    # SF > 0 means corrupting governance costs more than it could earn.
    safety_factor = calculate_safety_factor(coc, pfc)
    print(f" Safety Factor (SF): {safety_factor:.4f}")
    print(f"\n Cost of Corruption (CoC): ${coc['cost_of_corruption']:,.2f}")
    print(f" Profit from Corruption (PfC): ${pfc['total_profit']:,.2f}")
# Produce the economic-security report from the DataFrames and staking
# metrics computed earlier in the notebook.
run_analysis(liquidations_profit, fees_profit, staking_metrics)
# Run to view results  (notebook cell marker; commented out so the file parses)