Commit

Merge branch 'master' into blackwing
steegecs authored Oct 15, 2024
2 parents ebd8bd8 + 4900d47 commit c8973de
Showing 306 changed files with 53,360 additions and 24 deletions.
333 changes: 333 additions & 0 deletions deployment/decentralized_network_deployments.csv

Large diffs are not rendered by default.
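Note: the CSV itself isn't shown, but its columns can be inferred from the fieldnames list in deployment/produce_decentralized_network_csv.py further down in this commit. As an illustrative sketch (not part of the diff), the file could be read back with only the standard library, assuming those columns:

import csv

# Column names assumed from the fieldnames list in produce_decentralized_network_csv.py.
with open("deployment/decentralized_network_deployments.csv", newline="") as f:
    for row in csv.DictReader(f):
        if row["status"] == "prod":
            print(row["deployment name"], row["query-id"])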

224 changes: 214 additions & 10 deletions deployment/deployment.json
@@ -11781,7 +11781,7 @@
},
"decentralized-network": {
"slug": "binance-staked-eth-ethereum",
"query-id": "todo"
"query-id": "EdxuQc4V8zGV9f34KZJhUNmBrfdPWMmbjVTzjUsyJZgd"
}
}
},
@@ -11807,7 +11807,7 @@
},
"decentralized-network": {
"slug": "binance-staked-eth-bsc",
"query-id": "todo"
"query-id": "Hfb8J7aNfT8cjgDTYMjWt5Mb7yRtS1CJC4UQeWEfWNsU"
}
}
}
@@ -12283,7 +12283,7 @@
},
"decentralized-network": {
"slug": "dinero-pxeth-ethereum",
"query-id": "todo"
"query-id": "4WV13q7iDBhFyqprdbFX5eb2h29Qh6c7aVuCjUHX8148"
}
}
}
@@ -12317,7 +12317,7 @@
},
"decentralized-network": {
"slug": "stake-link-liquid-ethereum",
"query-id": "todo"
"query-id": "AV8BsSRc9oC4dr6yN7wPi4PyXsBnW3oHqbvoKfbEr1XW"
}
}
}
@@ -12351,7 +12351,7 @@
},
"decentralized-network": {
"slug": "gogopool-avalanche",
"query-id": "todo"
"query-id": "4fD8jH8YUR4HSyQZArgyX5RVvgPLoG6H1stzUeHSNBne"
}
}
}
@@ -12385,7 +12385,7 @@
},
"decentralized-network": {
"slug": "gaurda-staking-ethereum",
"query-id": "todo"
"query-id": "7Ax8MpZkfR5CnQFKRxUEGG8TuyrW5UwxohXC8opnqZ7j"
}
}
}
@@ -12419,7 +12419,7 @@
},
"decentralized-network": {
"slug": "tenderize-v2-ethereum",
"query-id": "todo"
"query-id": "7dyhXtGrSebgffTAduYLzbdef1EQwDWEULnVogeTDmfG"
}
}
},
@@ -12445,7 +12445,7 @@
},
"decentralized-network": {
"slug": "tenderize-v2-arbitrum",
"query-id": "todo"
"query-id": "Fx59VkMxNeoPDsCaHbjiqq6tmfFzt4LrUEtnAmv7GYrP"
}
}
}
@@ -12479,7 +12479,7 @@
},
"decentralized-network": {
"slug": "tensorplex-ethereum",
"query-id": "todo"
"query-id": "Fx59VkMxNeoPDsCaHbjiqq6tmfFzt4LrUEtnAmv7GYrP"
}
}
}
@@ -12513,7 +12513,7 @@
},
"decentralized-network": {
"slug": "paxos-gold-ethereum",
"query-id": "todo"
"query-id": "3GYHnRz961CsPut6udWqaVdDJN7imKZWjBqfyzaAbPri"
}
}
}
@@ -12547,6 +12547,210 @@
},
"decentralized-network": {
"slug": "tether-gold-ethereum",
"query-id": "E9VxYW2ULCFGyYGibzbq3NW3LySAwEEvbqTpKuGjTrN3"
}
}
}
}
},
"aurus": {
"schema": "generic",
"base": "aurus",
"protocol": "aurus",
"project": "aurus",
"deployments": {
"aurus-ethereum": {
"network": "ethereum",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "aurus.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "aurus-ethereum",
"query-id": "aurus-ethereum"
},
"decentralized-network": {
"slug": "aurus-ethereum",
"query-id": "E9VxYW2ULCFGyYGibzbq3NW3LySAwEEvbqTpKuGjTrN3"
}
}
}
}
},
"klimadao": {
"schema": "generic",
"base": "klimadao",
"protocol": "klimadao",
"project": "klimadao",
"deployments": {
"klimadao-polygon": {
"network": "polygon",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "klimadao.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "klimadao-polygon",
"query-id": "klimadao-polygon"
},
"decentralized-network": {
"slug": "klimadao-polygon",
"query-id": "6SasXXKVoVSFEJqEGQqUQMEL2nufjaapbNuzE7tDCuu8"
}
}
}
}
},
"matrixdock": {
"schema": "generic",
"base": "matrixdock",
"protocol": "matrixdock",
"project": "matrixdock",
"deployments": {
"matrixdock-ethereum": {
"network": "ethereum",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "matrixdock.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "matrixdock-ethereum",
"query-id": "matrixdock-ethereum"
},
"decentralized-network": {
"slug": "matrixdock-ethereum",
"query-id": "CAKvXVz7dzbF5HH4MVWXi1ozhCm1omWpmYk4n2s56tjJ"
}
}
}
}
},
"powh3d": {
"schema": "generic",
"base": "powh3d",
"protocol": "powh3d",
"project": "powh3d",
"deployments": {
"powh3d-ethereum": {
"network": "ethereum",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "powh3d.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "powh3d-ethereum",
"query-id": "powh3d-ethereum"
},
"decentralized-network": {
"slug": "powh3d-ethereum",
"query-id": "todo"
}
}
}
}
},
"nexus-mutual": {
"schema": "generic",
"base": "nexus-mutual",
"protocol": "nexus-mutual",
"project": "nexus-mutual",
"deployments": {
"nexus-mutual-ethereum": {
"network": "ethereum",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "nexus-mutual.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "nexus-mutual-ethereum",
"query-id": "nexus-mutual-ethereum"
},
"decentralized-network": {
"slug": "nexus-mutual-ethereum",
"query-id": "todo"
}
}
}
}
},
"yieldyak-staked-avax": {
"schema": "generic",
"base": "yieldyak-staked-avax",
"protocol": "yieldyak-staked-avax",
"project": "yieldyak-staked-avax",
"deployments": {
"yieldyak-staked-avax-avalanche": {
"network": "avalanche",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "yieldyak-staked-avax.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "yieldyak-staked-avax-avalanche",
"query-id": "yieldyak-staked-avax-avalanche"
},
"decentralized-network": {
"slug": "yieldyak-staked-avax-avalanche",
"query-id": "todo"
}
}
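Note: the bulk of the deployment.json changes above replace "todo" placeholders with live decentralized-network query IDs, while several newly added entries (powh3d, nexus-mutual, yieldyak-staked-avax) still carry the placeholder. As an illustrative sketch (not part of the diff), the remaining placeholders could be listed with only the standard library, assuming the top-level object maps project names to entries shaped like those shown:

import json

# Load the registry modified in this commit.
with open("deployment/deployment.json") as f:
    registry = json.load(f)

# Flag deployments whose decentralized-network query-id is still the "todo" placeholder.
for project, info in registry.items():
    for name, deployment in info.get("deployments", {}).items():
        dn = deployment.get("services", {}).get("decentralized-network")
        if dn is not None and dn.get("query-id") == "todo":
            print(f"{project}/{name} still needs a query-id")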
78 changes: 64 additions & 14 deletions deployment/produce_decentralized_network_csv.py
@@ -1,6 +1,11 @@
import json
-import pandas as pd
import os

+try:
+    import pandas as pd
+except ImportError:
+    print("Warning: pandas is not installed. Using built-in CSV writing.")
+    pd = None

def extract_deployment_info(data):
    rows = []
@@ -28,20 +33,65 @@ def extract_deployment_info(data):
# Extract information
rows = extract_deployment_info(general_data)

-# Create DataFrame
-df_general = pd.DataFrame(rows)
+if pd is not None:
+    # Create DataFrame
+    df_general = pd.DataFrame(rows)

-## order by protocol type and deployment name
-df_general = df_general.sort_values(by=["protocol type", "deployment name"])
+    ## order by protocol type and deployment name
+    df_general = df_general.sort_values(by=["protocol type", "deployment name"])

-# Save to CSV
-csv_file_path_general = "decentralized_network_deployments.csv"
-df_general.to_csv(csv_file_path_general, index=False)
+    # Save to CSV
+    csv_file_path_general = "decentralized_network_deployments.csv"
+    df_general.to_csv(csv_file_path_general, index=False)

-# Print Deployment Statistics
-df_prod = df_general[df_general['status'] == 'prod']
-distinct_protocols = df_prod['protocol name'].nunique()
-non_governance_protocols = df_prod[~df_prod['protocol name'].str.contains('governance', case=False, na=False)]['protocol name'].nunique()
+    # Print Deployment Statistics
+    df_prod = df_general[df_general['status'] == 'prod']
+    distinct_protocols = df_prod['protocol name'].nunique()
+    non_governance_protocols = df_prod[~df_prod['protocol name'].str.contains('governance', case=False, na=False)]['protocol name'].nunique()
+    distinct_networks = df_prod['network'].nunique()

-print(f"Number of distinct protocols in production: {distinct_protocols}")
-print(f"Number of non-governance protocols in production: {non_governance_protocols}")
+    print(f"Number of distinct protocols in production: {distinct_protocols}")
+    print(f"Number of non-governance protocols in production: {non_governance_protocols}")
+    print(f"Number of distinct networks deployed on: {distinct_networks}")
+
+    # Print number of protocols for each type
+    protocol_type_counts = df_prod.groupby("protocol type")["protocol name"].nunique()
+    print("\nNumber of protocols for each type:")
+    for protocol_type, count in protocol_type_counts.items():
+        print(f"{protocol_type}: {count}")
+
+else:
+    # Fallback to built-in CSV writing and manual counting
+    import csv
+    from collections import defaultdict
+
+    with open("decentralized_network_deployments.csv", "w", newline="") as csvfile:
+        fieldnames = ["deployment name", "protocol name", "protocol type", "network", "status", "slug", "query-id"]
+        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+        writer.writeheader()
+        for row in rows:
+            writer.writerow(row)
+
+    protocol_counts = defaultdict(set)
+    distinct_protocols = set()
+    non_governance_protocols = set()
+    distinct_networks = set()
+
+    for row in rows:
+        if row["status"] == "prod":
+            protocol_type = row["protocol type"]
+            protocol_name = row["protocol name"]
+            network = row["network"]
+            protocol_counts[protocol_type].add(protocol_name)
+            distinct_protocols.add(protocol_name)
+            distinct_networks.add(network)
+            if "governance" not in protocol_name.lower():
+                non_governance_protocols.add(protocol_name)
+
+    print(f"Number of distinct protocols in production: {len(distinct_protocols)}")
+    print(f"Number of non-governance protocols in production: {len(non_governance_protocols)}")
+    print(f"Number of distinct networks deployed on: {len(distinct_networks)}")
+
+    print("\nNumber of protocols for each type:")
+    for protocol_type, protocols in protocol_counts.items():
+        print(f"{protocol_type}: {len(protocols)}")
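Note: the query IDs recorded in deployment.json are what consumers pass to The Graph's gateway. As an illustrative sketch (not part of the diff; the gateway URL format and API key are assumptions about The Graph's public gateway, and the query id is copied from the binance-staked-eth-ethereum entry above), a subgraph's indexing progress could be checked with only the standard library, in the same dependency-light spirit as the pandas fallback:

import json
import urllib.request

GATEWAY_API_KEY = "<your-api-key>"  # hypothetical placeholder; supply your own key
QUERY_ID = "EdxuQc4V8zGV9f34KZJhUNmBrfdPWMmbjVTzjUsyJZgd"  # from the diff above

# Assumed endpoint format for The Graph's decentralized-network gateway.
url = f"https://gateway.thegraph.com/api/{GATEWAY_API_KEY}/subgraphs/id/{QUERY_ID}"

# _meta is a standard graph-node field reporting the latest indexed block.
payload = json.dumps({"query": "{ _meta { block { number } } }"}).encode()
req = urllib.request.Request(url, data=payload, headers={"Content-Type": "application/json"})
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))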
4 changes: 4 additions & 0 deletions subgraphs/aurus/.gitignore
@@ -0,0 +1,4 @@
.DS_Store
configure.ts
docs/
package-lock.json