
Commit

Merge branch 'duneanalytics:main' into main
danielpartida authored Sep 19, 2024
2 parents e489553 + 7e1a69c commit e5c90a1
Showing 1,266 changed files with 46,667 additions and 23,936 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -5,6 +5,8 @@ dbt_macros/dune/create_views_of_dependencies.sql
dbt_macros/dune/alter_table_locations.sql
scripts/integration_test/test_diffs_tokens/*
.vs
dbt_subprojects/manifests/*


# Byte-compiled / optimized / DLL files
__pycache__/
@@ -146,3 +148,4 @@ logs/
!target/manifest.json
!target/run_results.json
!target/sources.json

9 changes: 7 additions & 2 deletions README.md
@@ -94,7 +94,7 @@ Please note: the test tables built in the CI pipeline will exist for ~24 hours.

### Connecting with other wizards

We use Discord to connect with our community. Head over to spellbook channel on [Dune's Discord](https://discord.gg/dunecom) for questions or to ask for help with a particular PR. We encourage you to learn by doing, and leverage our vibrant community to help you get going.
We use Discord to connect with our community. Head over to spellbook channel on [Dune's Discord](https://discord.com/invite/ErrzwBz) for questions or to ask for help with a particular PR. We encourage you to learn by doing, and leverage our vibrant community to help you get going.

## Setting up your Local Dev Environment

@@ -118,6 +118,12 @@ cd user\directory\github\spellbook
# Change this to wherever spellbook is stored locally on your machine.
```

Within the Spellbook repo, there are multiple dbt projects located in the root directory. Navigate to the correct project, depending on your use case.

```console
cd ../spellbook/dbt_subprojects/<subproject_name>/
```
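
For example, to work on spells housed in the daily_spellbook subproject (one of the projects touched in this commit), starting from the repository root:

```console
cd dbt_subprojects/daily_spellbook/
```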

Using the pipfile located in the spellbook repo, run the below install command to create a pipenv.

```console
@@ -199,7 +205,6 @@ models:
error_after: { count: 24, period: hour }
tables:
- name: traces
loaded_at_field: block_time
```
See links to more docs on dbt below.
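
For context, the freshness settings shown above normally sit under a source definition in a dbt sources file; a minimal sketch (the `ethereum` source name here is illustrative, not taken from this commit):

```yaml
version: 2

sources:
  - name: ethereum
    freshness:
      error_after: { count: 24, period: hour }
    tables:
      - name: traces
```
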
5 changes: 3 additions & 2 deletions dbt_macros/shared/balancer/balancer_liquidity_macro.sql
@@ -18,7 +18,7 @@ WITH pool_labels AS (
date_trunc('day', minute) AS day,
contract_address AS token,
decimals,
AVG(price) AS price
APPROX_PERCENTILE(price, 0.5) AS price
FROM {{ source('prices', 'usd') }}
WHERE blockchain = '{{blockchain}}'
GROUP BY 1, 2, 3
@@ -74,7 +74,8 @@ WITH pool_labels AS (
DATE_TRUNC('day', minute) as day,
AVG(price) as eth_price
FROM {{ source('prices', 'usd') }}
WHERE symbol = 'ETH'
WHERE blockchain = 'ethereum'
AND contract_address = 0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2
GROUP BY 1
),

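Taken together, the two changed aggregations in this macro read roughly as follows after this commit (a sketch assembled from the hunks above; the CTE names are illustrative):

```sql
-- daily token prices now use an approximate median instead of a plain average
prices AS (
    SELECT
        date_trunc('day', minute) AS day,
        contract_address AS token,
        decimals,
        APPROX_PERCENTILE(price, 0.5) AS price
    FROM {{ source('prices', 'usd') }}
    WHERE blockchain = '{{blockchain}}'
    GROUP BY 1, 2, 3
),

-- the ETH reference price is now pinned to WETH on Ethereum by contract address
-- rather than the ambiguous symbol = 'ETH' filter
eth_prices AS (
    SELECT
        DATE_TRUNC('day', minute) AS day,
        AVG(price) AS eth_price
    FROM {{ source('prices', 'usd') }}
    WHERE blockchain = 'ethereum'
      AND contract_address = 0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2
    GROUP BY 1
)
```
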
162 changes: 162 additions & 0 deletions dbt_macros/shared/balances_incremental_subset_daily.sql
@@ -0,0 +1,162 @@
{# @DEV here

@NOTICE this macro constructs the address-level token balances table for a given input table
@NOTICE i.e., given lists of tokens and/or addresses, it generates a table with daily balances for each address-token pair

@PARAM blockchain -- blockchain name
@PARAM address_list -- must have an address column, can be none if only filtering on tokens
@PARAM token_list -- must have a token_address column, can be none if only filtering on addresses
@PARAM address_token_list -- for advanced usage, must have both (address, token_address) columns, can be none
@PARAM start_date -- the start_date, used to generate the daily timeseries

#}

{%- macro balances_incremental_subset_daily(
blockchain,
start_date,
address_list = none,
token_list = none,
address_token_list = none
)
%}

WITH
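-- filtered_daily_agg_balances: restrict the raw daily balance aggregates to the requested address/token filters
-- and scale raw balances to decimal units (erc20 by token decimals, native by 1e18)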
filtered_daily_agg_balances as (
select
b.blockchain,
b.day,
b.block_number,
b.block_time,
b.address,
b.token_address,
b.token_standard,
b.balance_raw,
CASE
WHEN b.token_standard = 'erc20' THEN b.balance_raw / power(10, erc20_tokens.decimals)
WHEN b.token_standard = 'native' THEN b.balance_raw / power(10, 18)
ELSE b.balance_raw
END as balance,
erc20_tokens.symbol as token_symbol,
token_id
from {{source('tokens_'~blockchain,'balances_daily_agg_base')}} b
{% if address_list is not none %}
inner join (select distinct address from {{address_list}}) f1
on f1.address = b.address
{% endif %}
{% if token_list is not none %}
inner join (select distinct token_address from {{token_list}}) f2
on f2.token_address = b.token_address
{% endif %}
{% if address_token_list is not none %}
inner join (select distinct address, token_address from {{address_token_list}}) f3
on f3.token_address = b.token_address
and f3.address = b.address
{% endif %}
left join {{ source('tokens', 'erc20') }} erc20_tokens on
erc20_tokens.blockchain = '{{blockchain}}'
AND erc20_tokens.contract_address = b.token_address
AND b.token_standard = 'erc20'
where day >= cast('{{start_date}}' as date)

)
,changed_balances as (
select *
, lead(cast(day as timestamp)) over (partition by token_address,address,token_id order by day asc) as next_update_day
from (
select * from (
select
blockchain
,day
,address
,token_symbol
,token_address
,token_standard
,token_id
,balance
from filtered_daily_agg_balances
where day >= cast('{{start_date}}' as date)
{% if is_incremental() %}
and {{ incremental_predicate('day') }}
{% endif %}
)
-- if we're running incremental, we need to retrieve the last known balance updates from before the current window
-- so we can correctly populate the forward fill.
{% if is_incremental() %}
UNION ALL
select * from (
select
blockchain
,max(day) as day
,address
,token_symbol
,token_address
,token_standard
,token_id
,max_by(balance, day) as balance
from filtered_daily_agg_balances
where day >= cast('{{start_date}}' as date)
and not {{ incremental_predicate('day') }}
group by 1,3,4,5,6,7
)
{% endif %}
)
),

days as (
select *
from unnest(
sequence(cast('{{start_date}}' as date)
, date(date_trunc('day',now()))
, interval '1' day
)
) as foo(day)
),
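-- forward_fill: join each calendar day to the latest balance change at or before that day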
forward_fill as (
select
blockchain,
cast(d.day as timestamp) as day,
address,
token_symbol,
token_address,
token_standard,
token_id,
balance,
b.day as last_updated,
b.next_update_day as next_update
from days d
left join changed_balances b
ON d.day >= b.day
and (b.next_update_day is null OR d.day < b.next_update_day) -- perform forward fill
)

select
b.blockchain,
b.day,
b.address,
b.token_symbol,
b.token_address,
b.token_standard,
b.token_id,
b.balance,
b.balance * p.price as balance_usd,
b.last_updated,
b.next_update
from(
select * from forward_fill
where balance > 0
{% if is_incremental() %}
and {{ incremental_predicate('day') }}
{% endif %}

) b
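-- daily USD prices: erc20 tokens match on contract address; the native token matches on the symbol
-- from evms.info (native price rows carry null blockchain and contract_address)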
left join {{source('prices','usd')}} p
on (token_standard = 'erc20'
and p.blockchain = '{{blockchain}}'
and b.token_address = p.contract_address
and b.day = p.minute)
or (token_standard = 'native'
and p.blockchain is null
and p.contract_address is null
and p.symbol = (select native_token_symbol from {{source('evms','info')}} where blockchain = '{{blockchain}}')
and b.day = p.minute)
{% endmacro %}
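
A minimal usage sketch for this macro inside a downstream dbt model (the model name and the `my_project_tracked_addresses` ref are hypothetical; only the macro signature comes from the file above). Because the macro uses `is_incremental()` and `incremental_predicate('day')`, the calling model would typically be materialized incrementally:

```sql
-- hypothetical model file, e.g. my_project_balances_daily.sql
-- my_project_tracked_addresses is assumed to expose an `address` column
{{ balances_incremental_subset_daily(
    blockchain = 'ethereum',
    start_date = '2024-01-01',
    address_list = ref('my_project_tracked_addresses')
) }}
```
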
5 changes: 0 additions & 5 deletions dbt_subprojects/daily_spellbook/dbt_project.yml
@@ -289,11 +289,6 @@ models:
fantom:
+schema: beethoven_x_fantom

rocifi:
+schema: rocifi
polygon:
+schema: rocifi_polygon

pooltogether:
+schema: pooltogether
ethereum:
