
Commit

Merge branch 'master' into nexus-mutual
dhruv-chauhan committed Oct 10, 2024
2 parents 745bca9 + 211599e commit dffad17
Showing 37 changed files with 4,933 additions and 27 deletions.
333 changes: 333 additions & 0 deletions deployment/decentralized_network_deployments.csv

Large diffs are not rendered by default.
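
The CSV contents are not rendered above, but the layout can be inferred from the fieldnames used in deployment/produce_decentralized_network_csv.py later in this commit. A sketch of the header plus one illustrative row, using values from the powh3d entry added to deployment.json (the "protocol type" value shown is an assumption about how extract_deployment_info maps the entry's fields):

deployment name,protocol name,protocol type,network,status,slug,query-id
powh3d-ethereum,powh3d,generic,ethereum,prod,powh3d-ethereum,todo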

60 changes: 47 additions & 13 deletions deployment/deployment.json
@@ -11781,7 +11781,7 @@
},
"decentralized-network": {
"slug": "binance-staked-eth-ethereum",
"query-id": "todo"
"query-id": "EdxuQc4V8zGV9f34KZJhUNmBrfdPWMmbjVTzjUsyJZgd"
}
}
},
@@ -11807,7 +11807,7 @@
},
"decentralized-network": {
"slug": "binance-staked-eth-bsc",
"query-id": "todo"
"query-id": "Hfb8J7aNfT8cjgDTYMjWt5Mb7yRtS1CJC4UQeWEfWNsU"
}
}
}
@@ -12283,7 +12283,7 @@
},
"decentralized-network": {
"slug": "dinero-pxeth-ethereum",
"query-id": "todo"
"query-id": "4WV13q7iDBhFyqprdbFX5eb2h29Qh6c7aVuCjUHX8148"
}
}
}
@@ -12317,7 +12317,7 @@
},
"decentralized-network": {
"slug": "stake-link-liquid-ethereum",
"query-id": "todo"
"query-id": "AV8BsSRc9oC4dr6yN7wPi4PyXsBnW3oHqbvoKfbEr1XW"
}
}
}
@@ -12351,7 +12351,7 @@
},
"decentralized-network": {
"slug": "gogopool-avalanche",
"query-id": "todo"
"query-id": "4fD8jH8YUR4HSyQZArgyX5RVvgPLoG6H1stzUeHSNBne"
}
}
}
@@ -12385,7 +12385,7 @@
},
"decentralized-network": {
"slug": "gaurda-staking-ethereum",
"query-id": "todo"
"query-id": "7Ax8MpZkfR5CnQFKRxUEGG8TuyrW5UwxohXC8opnqZ7j"
}
}
}
@@ -12419,7 +12419,7 @@
},
"decentralized-network": {
"slug": "tenderize-v2-ethereum",
"query-id": "todo"
"query-id": "7dyhXtGrSebgffTAduYLzbdef1EQwDWEULnVogeTDmfG"
}
}
},
@@ -12445,7 +12445,7 @@
},
"decentralized-network": {
"slug": "tenderize-v2-arbitrum",
"query-id": "todo"
"query-id": "Fx59VkMxNeoPDsCaHbjiqq6tmfFzt4LrUEtnAmv7GYrP"
}
}
}
@@ -12479,7 +12479,7 @@
},
"decentralized-network": {
"slug": "tensorplex-ethereum",
"query-id": "todo"
"query-id": "Fx59VkMxNeoPDsCaHbjiqq6tmfFzt4LrUEtnAmv7GYrP"
}
}
}
@@ -12513,7 +12513,7 @@
},
"decentralized-network": {
"slug": "paxos-gold-ethereum",
"query-id": "todo"
"query-id": "3GYHnRz961CsPut6udWqaVdDJN7imKZWjBqfyzaAbPri"
}
}
}
@@ -12547,7 +12547,7 @@
},
"decentralized-network": {
"slug": "tether-gold-ethereum",
"query-id": "todo"
"query-id": "E9VxYW2ULCFGyYGibzbq3NW3LySAwEEvbqTpKuGjTrN3"
}
}
}
@@ -12581,7 +12581,7 @@
},
"decentralized-network": {
"slug": "aurus-ethereum",
"query-id": "todo"
"query-id": "E9VxYW2ULCFGyYGibzbq3NW3LySAwEEvbqTpKuGjTrN3"
}
}
}
@@ -12615,7 +12615,7 @@
},
"decentralized-network": {
"slug": "klimadao-polygon",
"query-id": "todo"
"query-id": "6SasXXKVoVSFEJqEGQqUQMEL2nufjaapbNuzE7tDCuu8"
}
}
}
@@ -12649,6 +12649,40 @@
},
"decentralized-network": {
"slug": "matrixdock-ethereum",
"query-id": "CAKvXVz7dzbF5HH4MVWXi1ozhCm1omWpmYk4n2s56tjJ"
}
}
}
}
},
"powh3d": {
"schema": "generic",
"base": "powh3d",
"protocol": "powh3d",
"project": "powh3d",
"deployments": {
"powh3d-ethereum": {
"network": "ethereum",
"status": "prod",
"versions": {
"schema": "3.0.0",
"subgraph": "1.0.0",
"methodology": "1.0.0"
},
"files": {
"template": "powh3d.template.yaml"
},
"options": {
"prepare:yaml": true,
"prepare:constants": true
},
"services": {
"hosted-service": {
"slug": "powh3d-ethereum",
"query-id": "powh3d-ethereum"
},
"decentralized-network": {
"slug": "powh3d-ethereum",
"query-id": "todo"
}
}
78 changes: 64 additions & 14 deletions deployment/produce_decentralized_network_csv.py
@@ -1,6 +1,11 @@
import json
-import pandas as pd
import os

+try:
+    import pandas as pd
+except ImportError:
+    print("Warning: pandas is not installed. Using built-in CSV writing.")
+    pd = None
+
def extract_deployment_info(data):
    rows = []
@@ -28,20 +33,65 @@ def extract_deployment_info(data):
# Extract information
rows = extract_deployment_info(general_data)

-# Create DataFrame
-df_general = pd.DataFrame(rows)
+if pd is not None:
+    # Create DataFrame
+    df_general = pd.DataFrame(rows)

-## order by protocol type and deployment name
-df_general = df_general.sort_values(by=["protocol type", "deployment name"])
+    ## order by protocol type and deployment name
+    df_general = df_general.sort_values(by=["protocol type", "deployment name"])

-# Save to CSV
-csv_file_path_general = "decentralized_network_deployments.csv"
-df_general.to_csv(csv_file_path_general, index=False)
+    # Save to CSV
+    csv_file_path_general = "decentralized_network_deployments.csv"
+    df_general.to_csv(csv_file_path_general, index=False)

-# Print Deployment Statistics
-df_prod = df_general[df_general['status'] == 'prod']
-distinct_protocols = df_prod['protocol name'].nunique()
-non_governance_protocols = df_prod[~df_prod['protocol name'].str.contains('governance', case=False, na=False)]['protocol name'].nunique()
+    # Print Deployment Statistics
+    df_prod = df_general[df_general['status'] == 'prod']
+    distinct_protocols = df_prod['protocol name'].nunique()
+    non_governance_protocols = df_prod[~df_prod['protocol name'].str.contains('governance', case=False, na=False)]['protocol name'].nunique()
+    distinct_networks = df_prod['network'].nunique()

-print(f"Number of distinct protocols in production: {distinct_protocols}")
-print(f"Number of non-governance protocols in production: {non_governance_protocols}")
+    print(f"Number of distinct protocols in production: {distinct_protocols}")
+    print(f"Number of non-governance protocols in production: {non_governance_protocols}")
+    print(f"Number of distinct networks deployed on: {distinct_networks}")
+
+    # Print number of protocols for each type
+    protocol_type_counts = df_prod.groupby("protocol type")["protocol name"].nunique()
+    print("\nNumber of protocols for each type:")
+    for protocol_type, count in protocol_type_counts.items():
+        print(f"{protocol_type}: {count}")
+
+else:
+    # Fallback to built-in CSV writing and manual counting
+    import csv
+    from collections import defaultdict
+
+    with open("decentralized_network_deployments.csv", "w", newline="") as csvfile:
+        fieldnames = ["deployment name", "protocol name", "protocol type", "network", "status", "slug", "query-id"]
+        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+        writer.writeheader()
+        for row in rows:
+            writer.writerow(row)
+
+    protocol_counts = defaultdict(set)
+    distinct_protocols = set()
+    non_governance_protocols = set()
+    distinct_networks = set()
+
+    for row in rows:
+        if row["status"] == "prod":
+            protocol_type = row["protocol type"]
+            protocol_name = row["protocol name"]
+            network = row["network"]
+            protocol_counts[protocol_type].add(protocol_name)
+            distinct_protocols.add(protocol_name)
+            distinct_networks.add(network)
+            if "governance" not in protocol_name.lower():
+                non_governance_protocols.add(protocol_name)
+
+    print(f"Number of distinct protocols in production: {len(distinct_protocols)}")
+    print(f"Number of non-governance protocols in production: {len(non_governance_protocols)}")
+    print(f"Number of distinct networks deployed on: {len(distinct_networks)}")
+
+    print("\nNumber of protocols for each type:")
+    for protocol_type, protocols in protocol_counts.items():
+        print(f"{protocol_type}: {len(protocols)}")
4 changes: 4 additions & 0 deletions subgraphs/powh3d/.gitignore
@@ -0,0 +1,4 @@
.DS_Store
configure.ts
docs/
package-lock.json
17 changes: 17 additions & 0 deletions subgraphs/powh3d/README.md
@@ -0,0 +1,17 @@
# Powh 3D Subgraph

## Methodology v1.0.0

## Metrics

### Usage and Transactions

### TVL

### Revenue

## Useful Links

- Landing Page:
- Docs:
- Contracts: