-
- Channel ID |
- Peer Alias |
- Capacity |
- Outbound Liquidity |
- |
- Inbound Liquidity |
- Rebal Out? |
- Enabled? |
- Fee Ratio |
- Rebal In? |
- Target Amt |
- Max Cost % |
- oTarget% |
- iTarget% |
- AR |
- 7-Day Rate |
- Active |
-
- {% for channel in channels %}
-
- {{ channel.short_chan_id }} |
- {% if channel.alias == '' %}{{ channel.remote_pubkey|slice:":12" }}{% else %}{{ channel.alias }}{% endif %} |
- {{ channel.capacity|intcomma }} |
- {{ channel.local_balance|intcomma }} ({{ channel.percent_outbound }}%) |
- {% if channel.percent_inbound == 0 %} {% elif channel.percent_outbound == 0 %} {% else %} {% endif %} |
- {{ channel.remote_balance|intcomma }} ({{ channel.percent_inbound }}%) |
- = channel.ar_out_target and channel.auto_rebalance == False %}style="background-color: #a6dce2">True{% else %}style="background-color: #fadbd5">False{% endif %} |
- True{% else %}style="background-color: #fadbd5">False{% endif %} |
- {{ channel.fee_ratio }}%{% else %}style="background-color: #fadbd5">{{ channel.fee_ratio }}%{% endif %} |
- = 1 and channel.fee_check < 100 and channel.auto_rebalance == True %}style="background-color: #a6dce2">True ({{ channel.steps }}){% else %}style="background-color: #fadbd5">False{% endif %} |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
- {% if channel.attempts == 0 %}---{% else %}{{ channel.success_rate }}% ({{ channel.success }}/{{ channel.attempts }}){% endif %} |
- True{% else %}style="background-color: #fadbd5">False{% endif %} |
-
- {% endfor %}
+
-{% endif %}
-{% if not channels %}
-
-
You dont have any channels to rebalance yet.
-
-{% endif %}
-{% if rebalancer %}
-
-
-
-
- Requested |
- Start |
- Stop |
- Scheduled Duration |
- Actual Duration |
- Value |
- Fee Limit |
- Target PPM |
- Fees Paid |
- Last Hop Alias |
- Status |
- Hash |
-
- {% for rebalance in rebalancer %}
-
- {{ rebalance.requested|naturaltime }} |
- ---{% else %}title="{{ rebalance.start }}">{{ rebalance.start|naturaltime }}{% endif %} |
- 1 %}title="{{ rebalance.stop }}">{{ rebalance.stop|naturaltime }}{% else %}>---{% endif %} |
- {{ rebalance.duration }} minutes |
- {% if rebalance.status == 2 %}{{ rebalance.stop|timeuntil:rebalance.start }}{% else %}---{% endif %} |
- {{ rebalance.value|intcomma }} |
- {{ rebalance.fee_limit|intcomma }} |
- {{ rebalance.ppm|intcomma }} |
- {% if rebalance.status == 2 %}{{ rebalance.fees_paid|intcomma }}{% else %}---{% endif %} |
- {% if rebalance.target_alias == '' %}---{% else %}{{ rebalance.target_alias }}{% endif %} |
- {% if rebalance.status == 0 %}Pending{% elif rebalance.status == 1 %}In-Flight{% elif rebalance.status == 2 %}Successful{% elif rebalance.status == 3 %}Timeout{% elif rebalance.status == 4 %}No Route{% elif rebalance.status == 5 %}Error{% elif rebalance.status == 6 %}Incorrect Payment Details{% elif rebalance.status == 7 %}Insufficient Balance{% elif rebalance.status == 400 %}Rebalancer Request Failed{% elif rebalance.status == 408 %}Rebalancer Request Timeout{% else %}{{ rebalance.status }}{% endif %} |
- {% if rebalance.payment_hash == '' %}---{% else %}{{ rebalance.payment_hash }}{% endif %} |
-
- {% endfor %}
-
-
-{% endif %}
-{% if not rebalancer %}
-
-
You dont have any rebalancer requests yet.
-
-{% endif %}
-{% if local_settings %}
-
-
Auto-Rebalancer Settings
-
-
- Key |
- Value |
-
- {% for settings in local_settings %}
-
- {{ settings.key }} |
- {{ settings.value|intcomma }} |
-
- {% endfor %}
-
-
-{% endif %}
-
-
Update Auto Rebalancer Settings
-
-
-
-
-{% if channels %}
-
-
Manual Rebalancer Request
-
-{% endif %}
+{% include 'rebalances_table.html' with count=20 title='
Rebalance Requests' %}
+{% include 'local_settings.html' with settings=local_settings title='Auto-Rebalancer' %}
+
{% endblock %}
diff --git a/gui/templates/route.html b/gui/templates/route.html
index 5094f1f5..83d86cd8 100644
--- a/gui/templates/route.html
+++ b/gui/templates/route.html
@@ -40,41 +40,7 @@
Route For : {{ payment_hash }}{% if total_cost %} | Total Costs: {{ total_co
A route was not found for this payment hash!
{% endif %}
-{% if rebalances %}
-
-
Associated Rebalances
-
-
- Requested |
- Start |
- Stop |
- Scheduled Duration |
- Actual Duration |
- Value |
- Fee Limit |
- Target PPM |
- Fees Paid |
- Last Hop Alias |
- Status |
-
- {% for rebalance in rebalances %}
-
- {{ rebalance.requested|naturaltime }} |
- ---{% else %}title="{{ rebalance.start }}">{{ rebalance.start|naturaltime }}{% endif %} |
- 1 %}title="{{ rebalance.stop }}">{{ rebalance.stop|naturaltime }}{% else %}>---{% endif %} |
- {{ rebalance.duration }} minutes |
- {% if rebalance.status == 2 %}{{ rebalance.stop|timeuntil:rebalance.start }}{% else %}---{% endif %} |
- {{ rebalance.value|intcomma }} |
- {{ rebalance.fee_limit|intcomma }} |
- {{ rebalance.ppm|intcomma }} |
- {% if rebalance.status == 2 %}{{ rebalance.fees_paid|intcomma }}{% else %}---{% endif %} |
- {% if rebalance.target_alias == '' %}None Specified{% else %}{{ rebalance.target_alias }}{% endif %} |
- {% if rebalance.status == 0 %}Pending{% elif rebalance.status == 1 %}In-Flight{% elif rebalance.status == 2 %}Successful{% elif rebalance.status == 3 %}Timeout{% elif rebalance.status == 4 %}No Route{% elif rebalance.status == 5 %}Error{% elif rebalance.status == 6 %}Incorrect Payment Details{% elif rebalance.status == 7 %}Insufficient Balance{% elif rebalance.status == 400 %}Rebalancer Request Failed{% elif rebalance.status == 408 %}Rebalancer Request Timeout{% else %}{{ rebalance.status }}{% endif %} |
-
- {% endfor %}
-
-
-{% endif %}
+{% include 'rebalances_table.html' with count=150 title='Rebalances' payment_hash=payment_hash %}
{% if invoices %}
Linked Invoice
diff --git a/gui/urls.py b/gui/urls.py
index 08c0b2a8..a7acaf17 100644
--- a/gui/urls.py
+++ b/gui/urls.py
@@ -47,7 +47,7 @@
path('newaddress/', views.new_address_form, name='new-address-form'),
path('createinvoice/', views.add_invoice_form, name='add-invoice-form'),
path('rebalancer/', views.rebalance, name='rebalancer'),
- path('autorebalance/', views.auto_rebalance, name='auto-rebalance'),
+ path('update_settings/', views.update_settings, name='update-settings'),
path('update_channel/', views.update_channel, name='update-channel'),
path('update_pending/', views.update_pending, name='update-pending'),
path('update_setting/', views.update_setting, name='update-setting'),
@@ -65,9 +65,11 @@
path('autofees/', views.autofees, name='autofees'),
path('peerevents/', views.peerevents, name='peerevents'),
path('advanced/', views.advanced, name='advanced'),
+ path('sign_message/', views.sign_message, name='sign-message'),
path('api/', include(router.urls), name='api-root'),
path('api-auth/', include('rest_framework.urls'), name='api-auth'),
path('api/connectpeer/', views.connect_peer, name='connect-peer'),
+ path('api/rebalance_stats/', views.rebalance_stats, name='rebalance-stats'),
path('api/openchannel/', views.open_channel, name='open-channel'),
path('api/closechannel/', views.close_channel, name='close-channel'),
path('api/createinvoice/', views.add_invoice, name='add-invoice'),
@@ -77,5 +79,6 @@
path('api/balances/', views.api_balances, name='api-balances'),
path('api/income/', views.api_income, name='api-income'),
path('api/pendingchannels/', views.pending_channels, name='pending-channels'),
+ path('api/bumpfee/', views.bump_fee, name='bump-fee'),
path('lndg-admin/', admin.site.urls),
]
diff --git a/gui/views.py b/gui/views.py
index 7922a5ae..40726be3 100644
--- a/gui/views.py
+++ b/gui/views.py
@@ -1,6 +1,6 @@
from django.contrib import messages
from django.shortcuts import get_object_or_404, render, redirect
-from django.db.models import Sum, IntegerField, Count, F, Q
+from django.db.models import Sum, IntegerField, Count, Max, F, Q, Case, When
from django.db.models.functions import Round
from django.contrib.auth.decorators import login_required
from datetime import datetime, timedelta
@@ -8,15 +8,17 @@
from rest_framework.response import Response
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
-from .forms import OpenChannelForm, CloseChannelForm, ConnectPeerForm, AddInvoiceForm, RebalancerForm, UpdateChannel, UpdateSetting, AutoRebalanceForm, AddTowerForm, RemoveTowerForm, DeleteTowerForm, BatchOpenForm, UpdatePending, UpdateClosing, UpdateKeysend, AddAvoid, RemoveAvoid
+from .forms import OpenChannelForm, CloseChannelForm, ConnectPeerForm, AddInvoiceForm, RebalancerForm, UpdateChannel, UpdateSetting, LocalSettingsForm, AddTowerForm, RemoveTowerForm, DeleteTowerForm, BatchOpenForm, UpdatePending, UpdateClosing, UpdateKeysend, AddAvoid, RemoveAvoid
from .models import Payments, PaymentHops, Invoices, Forwards, Channels, Rebalancer, LocalSettings, Peers, Onchain, Closures, Resolutions, PendingHTLCs, FailedHTLCs, Autopilot, Autofees, PendingChannels, AvoidNodes, PeerEvents
-from .serializers import ConnectPeerSerializer, FailedHTLCSerializer, LocalSettingsSerializer, OpenChannelSerializer, CloseChannelSerializer, AddInvoiceSerializer, PaymentHopsSerializer, PaymentSerializer, InvoiceSerializer, ForwardSerializer, ChannelSerializer, PendingHTLCSerializer, RebalancerSerializer, UpdateAliasSerializer, PeerSerializer, OnchainSerializer, ClosuresSerializer, ResolutionsSerializer
+from .serializers import ConnectPeerSerializer, FailedHTLCSerializer, LocalSettingsSerializer, OpenChannelSerializer, CloseChannelSerializer, AddInvoiceSerializer, PaymentHopsSerializer, PaymentSerializer, InvoiceSerializer, ForwardSerializer, ChannelSerializer, PendingHTLCSerializer, RebalancerSerializer, UpdateAliasSerializer, PeerSerializer, OnchainSerializer, ClosuresSerializer, ResolutionsSerializer, BumpFeeSerializer
from gui.lnd_deps import lightning_pb2 as ln
from gui.lnd_deps import lightning_pb2_grpc as lnrpc
from gui.lnd_deps import router_pb2 as lnr
from gui.lnd_deps import router_pb2_grpc as lnrouter
from gui.lnd_deps import wtclient_pb2 as wtrpc
from gui.lnd_deps import wtclient_pb2_grpc as wtstub
+from gui.lnd_deps import walletkit_pb2 as walletrpc
+from gui.lnd_deps import walletkit_pb2_grpc as walletstub
from gui.lnd_deps.lnd_connect import lnd_connect
from lndg import settings
from os import path
@@ -27,8 +29,8 @@ def graph_links():
if LocalSettings.objects.filter(key='GUI-GraphLinks').exists():
graph_links = str(LocalSettings.objects.filter(key='GUI-GraphLinks')[0].value)
else:
- LocalSettings(key='GUI-GraphLinks', value='https://1ml.com').save()
- graph_links = 'https://1ml.com'
+ LocalSettings(key='GUI-GraphLinks', value='https://amboss.space').save()
+ graph_links = 'https://amboss.space'
return graph_links
def network_links():
@@ -154,9 +156,9 @@ def home(request):
#Get recorded invoice details
invoices = Invoices.objects.exclude(state=2)
#Get recorded forwarding events
- forwards = Forwards.objects.all().annotate(amt_in=Sum('amt_in_msat')/1000).annotate(amt_out=Sum('amt_out_msat')/1000).annotate(ppm=Round((Sum('fee')*1000000000)/Sum('amt_out_msat'), output_field=IntegerField())).order_by('-id')
+ forwards = Forwards.objects.all().annotate(amt_in=Sum('amt_in_msat')/1000, amt_out=Sum('amt_out_msat')/1000, ppm=Round((Sum('fee')*1000000000)/Sum('amt_out_msat'), output_field=IntegerField())).order_by('-id')
#Get current active channels
- active_channels = channels.filter(is_active=True, is_open=True, private=False).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*1000)/Sum('capacity')).annotate(inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*1000)/Sum('capacity')).order_by('outbound_percent')
+ active_channels = channels.filter(is_active=True, is_open=True, private=False).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*1000)/Sum('capacity'), inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*1000)/Sum('capacity')).order_by('outbound_percent')
active_capacity = 0 if active_channels.count() == 0 else active_channels.aggregate(Sum('capacity'))['capacity__sum']
active_inbound = 0 if active_capacity == 0 else active_channels.aggregate(Sum('remote_balance'))['remote_balance__sum']
active_outbound = 0 if active_capacity == 0 else active_channels.aggregate(Sum('local_balance'))['local_balance__sum']
@@ -234,9 +236,9 @@ def home(request):
available_count += 1
detailed_active_channels.append(detailed_channel)
#Get current inactive channels
- inactive_channels = channels.filter(is_active=False, is_open=True, private=False).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).annotate(inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity')).order_by('outbound_percent')
+ inactive_channels = channels.filter(is_active=False, is_open=True, private=False).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity'), inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity')).order_by('outbound_percent')
inactive_capacity = 0 if inactive_channels.count() == 0 else inactive_channels.aggregate(Sum('capacity'))['capacity__sum']
- private_channels = channels.filter(is_open=True, private=True).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).annotate(inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity')).order_by('outbound_percent')
+ private_channels = channels.filter(is_open=True, private=True).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity'), inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity')).order_by('outbound_percent')
inactive_outbound = 0 if inactive_channels.count() == 0 else inactive_channels.aggregate(Sum('local_balance'))['local_balance__sum']
inactive_inbound = 0 if inactive_channels.count() == 0 else inactive_channels.aggregate(Sum('remote_balance'))['remote_balance__sum']
private_count = private_channels.count()
@@ -257,10 +259,9 @@ def home(request):
total_costs_7day = total_7day_fees + onchain_costs_7day
total_costs_1day = total_1day_fees + onchain_costs_1day
#Get list of recent rebalance requests
- rebalances = Rebalancer.objects.all().annotate(ppm=Round((Sum('fee_limit')*1000000)/Sum('value'), output_field=IntegerField())).order_by('-id')
active_count = node_info.num_active_channels - active_private
total_channels = node_info.num_active_channels + node_info.num_inactive_channels - private_count
- local_settings = LocalSettings.objects.filter(key__contains='AR-').order_by('key')
+ local_settings = get_local_settings('AR-')
try:
db_size = round(path.getsize(path.expanduser(settings.LND_DATABASE_PATH))*0.000000001, 3)
except:
@@ -315,10 +316,9 @@ def home(request):
'pending_closed': pending_closed,
'pending_force_closed': pending_force_closed,
'waiting_for_close': waiting_for_close,
- 'rebalances': rebalances[:12],
'local_settings': local_settings,
'pending_htlc_count': pending_htlc_count,
- 'failed_htlcs': FailedHTLCs.objects.all().order_by('-id')[:10],
+ 'failed_htlcs': FailedHTLCs.objects.exclude(wire_failure=99).order_by('-id')[:10],
'1day_routed_ppm': 0 if routed_1day_amt == 0 else int((total_earned_1day/routed_1day_amt)*1000000),
'7day_routed_ppm': 0 if routed_7day_amt == 0 else int((total_earned_7day/routed_7day_amt)*1000000),
'1day_payments_ppm': 0 if payments_1day_amt == 0 else int((total_1day_fees/payments_1day_amt)*1000000),
@@ -476,7 +476,7 @@ def fees(request):
else:
LocalSettings(key='AF-UpdateHours', value='24').save()
update_hours = 24
- failed_htlc_df = DataFrame.from_records(FailedHTLCs.objects.filter(timestamp__gte=filter_1day).order_by('-id').values())
+ failed_htlc_df = DataFrame.from_records(FailedHTLCs.objects.exclude(wire_failure=99).filter(timestamp__gte=filter_1day).order_by('-id').values())
if failed_htlc_df.shape[0] > 0:
failed_htlc_df = failed_htlc_df[(failed_htlc_df['wire_failure']==15) & (failed_htlc_df['failure_detail']==6) & (failed_htlc_df['amount']>failed_htlc_df['chan_out_liq']+failed_htlc_df['chan_out_pending'])]
forwards = Forwards.objects.filter(forward_date__gte=filter_7day, amt_out_msat__gte=1000000)
@@ -527,7 +527,7 @@ def fees(request):
channels_df['eligible'] = channels_df.apply(lambda row: (datetime.now()-row['fees_updated']).total_seconds() > (update_hours*3600), axis=1)
context = {
'channels': [] if channels_df.empty else channels_df.sort_values(by=['out_percent']).to_dict(orient='records'),
- 'local_settings': LocalSettings.objects.filter(key__contains='AF-').order_by('key'),
+ 'local_settings': get_local_settings('AF-'),
'network': 'testnet/' if settings.LND_NETWORK == 'testnet' else '',
'graph_links': graph_links(),
'network_links': network_links()
@@ -539,7 +539,7 @@ def fees(request):
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
def advanced(request):
if request.method == 'GET':
- channels = Channels.objects.filter(is_open=True).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*1000)/Sum('capacity')).annotate(inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*1000)/Sum('capacity')).order_by('-is_active', 'outbound_percent')
+ channels = Channels.objects.filter(is_open=True).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*1000)/Sum('capacity'), inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*1000)/Sum('capacity')).order_by('-is_active', 'outbound_percent')
channels_df = DataFrame.from_records(channels.values())
if channels_df.shape[0] > 0:
channels_df['out_percent'] = channels_df.apply(lambda row: int(round(row['outbound_percent']/10, 0)), axis=1)
@@ -551,7 +551,7 @@ def advanced(request):
channels_df['local_max_htlc'] = channels_df['local_max_htlc_msat']/1000
context = {
'channels': channels_df.to_dict(orient='records'),
- 'local_settings': LocalSettings.objects.all().order_by('key'),
+ 'local_settings': get_local_settings('AF-', 'AR-', 'GUI-', 'LND-'),
'network': 'testnet/' if settings.LND_NETWORK == 'testnet' else '',
'graph_links': graph_links(),
'network_links': network_links()
@@ -575,10 +575,9 @@ def route(request):
'total_cost': total_cost,
'total_ppm': total_ppm,
'route': route,
- 'rebalances': Rebalancer.objects.filter(payment_hash=payment_hash).annotate(ppm=Round((Sum('fee_limit')*1000000)/Sum('value'), output_field=IntegerField())),
'invoices': Invoices.objects.filter(r_hash=payment_hash),
- 'incoming_htlcs': PendingHTLCs.objects.filter(incoming=True, hash_lock=payment_hash).annotate(blocks_til_expiration=Sum('expiration_height')-block_height).annotate(hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock'),
- 'outgoing_htlcs': PendingHTLCs.objects.filter(incoming=False, hash_lock=payment_hash).annotate(blocks_til_expiration=Sum('expiration_height')-block_height).annotate(hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock')
+ 'incoming_htlcs': PendingHTLCs.objects.filter(incoming=True, hash_lock=payment_hash).annotate(blocks_til_expiration=Sum('expiration_height')-block_height, hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock'),
+ 'outgoing_htlcs': PendingHTLCs.objects.filter(incoming=False, hash_lock=payment_hash).annotate(blocks_til_expiration=Sum('expiration_height')-block_height, hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock')
}
return render(request, 'route.html', context)
except Exception as e:
@@ -620,9 +619,9 @@ def peers(request):
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
def balances(request):
if request.method == 'GET':
- stub = lnrpc.LightningStub(lnd_connect())
+ stub = walletstub.WalletKitStub(lnd_connect())
context = {
- 'utxos': stub.ListUnspent(ln.ListUnspentRequest(min_confs=0, max_confs=9999999)).utxos,
+ 'utxos': stub.ListUnspent(walletrpc.ListUnspentRequest(min_confs=0, max_confs=9999999)).utxos,
'transactions': list(Onchain.objects.filter(block_height=0)) + list(Onchain.objects.exclude(block_height=0).order_by('-block_height')),
'network': 'testnet/' if settings.LND_NETWORK == 'testnet' else '',
'network_links': network_links()
@@ -999,7 +998,7 @@ def channel(request):
node_capacity = channels_df['capacity'].sum()
channels_df = DataFrame.from_records(Channels.objects.filter(chan_id=chan_id).values())
rebalancer_df = DataFrame.from_records(Rebalancer.objects.filter(last_hop_pubkey=channels_df['remote_pubkey'][0]).annotate(ppm=Round((Sum('fee_limit')*1000000)/Sum('value'), output_field=IntegerField())).order_by('-id').values())
- failed_htlc_df = DataFrame.from_records(FailedHTLCs.objects.filter(Q(chan_id_in=chan_id) | Q(chan_id_out=chan_id)).order_by('-id').values())
+ failed_htlc_df = DataFrame.from_records(FailedHTLCs.objects.exclude(wire_failure=99).filter(Q(chan_id_in=chan_id) | Q(chan_id_out=chan_id)).order_by('-id').values())
peer_info_df = DataFrame.from_records(Peers.objects.filter(pubkey=channels_df['remote_pubkey'][0]).values())
channels_df['local_balance'] = channels_df['local_balance'] + channels_df['pending_outbound']
channels_df['remote_balance'] = channels_df['remote_balance'] + channels_df['pending_inbound']
@@ -1398,9 +1397,7 @@ def opens(request):
exlcude_list = AvoidNodes.objects.values_list('pubkey')
filter_60day = datetime.now() - timedelta(days=60)
payments_60day = Payments.objects.filter(creation_date__gte=filter_60day).values_list('payment_hash')
- open_list = PaymentHops.objects.filter(payment_hash__in=payments_60day).exclude(node_pubkey=self_pubkey).exclude(node_pubkey__in=current_peers).exclude(node_pubkey__in=exlcude_list).values('node_pubkey').annotate(ppm=(Sum('fee')/Sum('amt'))*1000000).annotate(score=Round((Round(Count('id')/5, output_field=IntegerField())+Round(Sum('amt')/500000, output_field=IntegerField()))/10, output_field=IntegerField())).annotate(count=Count('id')).annotate(amount=Sum('amt')).annotate(fees=Sum('fee')).annotate(sum_cost_to=Sum('cost_to')/(Sum('amt')/1000000)).exclude(score=0).order_by('-score', 'ppm')[:21]
- for open in open_list:
- open['alias'] = PaymentHops.objects.filter(node_pubkey=open['node_pubkey']).order_by('-id')[0].alias
+ open_list = PaymentHops.objects.filter(payment_hash__in=payments_60day).exclude(node_pubkey=self_pubkey).exclude(node_pubkey__in=current_peers).exclude(node_pubkey__in=exlcude_list).values('node_pubkey').annotate(ppm=(Sum('fee')/Sum('amt'))*1000000, score=Round((Round(Count('id')/5, output_field=IntegerField())+Round(Sum('amt')/500000, output_field=IntegerField()))/10, output_field=IntegerField()), count=Count('id'), amount=Sum('amt'), fees=Sum('fee'), sum_cost_to=Sum('cost_to')/(Sum('amt')/1000000), alias=Max('alias')).exclude(score=0).order_by('-score', 'ppm')[:21]
context = {
'open_list': open_list,
'avoid_list': AvoidNodes.objects.all(),
@@ -1414,7 +1411,7 @@ def opens(request):
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
def actions(request):
if request.method == 'GET':
- channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*1000)/Sum('capacity')).annotate(inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*1000)/Sum('capacity'))
+ channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(outbound_percent=((Sum('local_balance')+Sum('pending_outbound'))*1000)/Sum('capacity'), inbound_percent=((Sum('remote_balance')+Sum('pending_inbound'))*1000)/Sum('capacity'))
filter_7day = datetime.now() - timedelta(days=7)
forwards = Forwards.objects.filter(forward_date__gte=filter_7day)
action_list = []
@@ -1479,8 +1476,8 @@ def pending_htlcs(request):
stub = lnrpc.LightningStub(lnd_connect())
block_height = stub.GetInfo(ln.GetInfoRequest()).block_height
context = {
- 'incoming_htlcs': PendingHTLCs.objects.filter(incoming=True).annotate(blocks_til_expiration=Sum('expiration_height')-block_height).annotate(hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock'),
- 'outgoing_htlcs': PendingHTLCs.objects.filter(incoming=False).annotate(blocks_til_expiration=Sum('expiration_height')-block_height).annotate(hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock')
+ 'incoming_htlcs': PendingHTLCs.objects.filter(incoming=True).annotate(blocks_til_expiration=Sum('expiration_height')-block_height, hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock'),
+ 'outgoing_htlcs': PendingHTLCs.objects.filter(incoming=False).annotate(blocks_til_expiration=Sum('expiration_height')-block_height, hours_til_expiration=((Sum('expiration_height')-block_height)*10)/60).order_by('hash_lock')
}
return render(request, 'pending_htlcs.html', context)
else:
@@ -1490,15 +1487,15 @@ def pending_htlcs(request):
def failed_htlcs(request):
if request.method == 'GET':
try:
- #print (f"{datetime.now().strftime('%c')} : {request.GET.urlencode()=}")
query = None if request.GET.urlencode()[1:] == '' else request.GET.urlencode()[1:].split('_')
chan_id = None if query is None or len(query) < 1 else query[0]
direction = None if query is None or len(query) < 2 else query[1]
- #print (f"{datetime.now().strftime('%c')} : {query=} {chan_id=} {direction=}")
- failed_htlcs=FailedHTLCs.objects.all().order_by('-id')[:150] if chan_id is None else (FailedHTLCs.objects.filter(chan_id_out=chan_id).order_by('-id')[:150] if direction == "O" else FailedHTLCs.objects.filter(chan_id_in=chan_id).order_by('-id')[:150])
-
+ failed_htlcs = FailedHTLCs.objects.exclude(wire_failure=99).order_by('-id')[:150] if chan_id is None else (FailedHTLCs.objects.exclude(wire_failure=99).filter(chan_id_out=chan_id).order_by('-id')[:150] if direction == "O" else FailedHTLCs.objects.exclude(wire_failure=99).filter(chan_id_in=chan_id).order_by('-id')[:150])
+ filter_7day = datetime.now() - timedelta(days=7)
+ agg_failed_htlcs = FailedHTLCs.objects.filter(timestamp__gte=filter_7day, wire_failure=99).values('chan_id_in', 'chan_id_out').annotate(count=Count('id'), volume=Sum('amount'), chan_in_alias=Max('chan_in_alias'), chan_out_alias=Max('chan_out_alias')).order_by('-count')[:21]
context = {
- 'failed_htlcs': failed_htlcs
+ 'failed_htlcs': failed_htlcs,
+ 'agg_failed_htlcs': agg_failed_htlcs
}
return render(request, 'failed_htlcs.html', context)
except Exception as e:
@@ -1534,13 +1531,7 @@ def invoices(request):
def rebalances(request):
if request.method == 'GET':
try:
- rebalances = Rebalancer.objects.all().annotate(ppm=Round((Sum('fee_limit')*1000000)/Sum('value'), output_field=IntegerField())).order_by('-id')
- rebalances_success = rebalances.filter(status=2)
- context = {
- 'rebalances': rebalances[:150],
- 'rebalances_success' : rebalances_success[:69]
- }
- return render(request, 'rebalances.html', context)
+ return render(request, 'rebalances.html')
except Exception as e:
try:
error = str(e.code())
@@ -1553,8 +1544,10 @@ def rebalances(request):
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
def batch(request):
if request.method == 'GET':
+ stub = lnrpc.LightningStub(lnd_connect())
context = {
'iterator': range(1,11),
+ 'balances': stub.WalletBalance(ln.WalletBalanceRequest())
}
return render(request, 'batch.html', context)
else:
@@ -1701,7 +1694,7 @@ def batch_open(request):
def forwards(request):
if request.method == 'GET':
context = {
- 'forwards': Forwards.objects.all().annotate(amt_in=Sum('amt_in_msat')/1000).annotate(amt_out=Sum('amt_out_msat')/1000).annotate(ppm=Round((Sum('fee')*1000000000)/Sum('amt_out_msat'), output_field=IntegerField())).order_by('-id')[:150],
+ 'forwards': Forwards.objects.all().annotate(amt_in=Sum('amt_in_msat')/1000, amt_out=Sum('amt_out_msat')/1000, ppm=Round((Sum('fee')*1000000000)/Sum('amt_out_msat'), output_field=IntegerField())).order_by('-id')[:150],
}
return render(request, 'forwards.html', context)
else:
@@ -1710,56 +1703,9 @@ def forwards(request):
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
def rebalancing(request):
if request.method == 'GET':
- channels_df = DataFrame.from_records(Channels.objects.filter(is_open=True, private=False).annotate(percent_inbound=((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity')).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).order_by('-is_active', 'percent_outbound').values())
- filter_7day = datetime.now() - timedelta(days=7)
- rebalancer_7d_df = DataFrame.from_records(Rebalancer.objects.filter(stop__gte=filter_7day).order_by('-id').values())
- if channels_df.shape[0] > 0:
- channels_df['inbound_can'] = channels_df['percent_inbound'] / channels_df['ar_in_target']
- channels_df['local_balance'] = channels_df['local_balance'] + channels_df['pending_outbound']
- channels_df['remote_balance'] = channels_df['remote_balance'] + channels_df['pending_inbound']
- channels_df['fee_ratio'] = channels_df.apply(lambda row: 100 if row['local_fee_rate'] == 0 else int(round(((row['remote_fee_rate']/row['local_fee_rate'])*1000)/10, 0)), axis=1)
- channels_df['fee_check'] = channels_df.apply(lambda row: 1 if row['ar_max_cost'] == 0 else int(round(((row['fee_ratio']/row['ar_max_cost'])*1000)/10, 0)), axis=1)
- channels_df['steps'] = channels_df.apply(lambda row: 0 if row['inbound_can'] < 1 else int(((row['percent_inbound']-row['ar_in_target'])/((row['ar_amt_target']/row['capacity'])*100))+0.999), axis=1)
- rebalancer_count_7d_df = DataFrame() if rebalancer_7d_df.empty else rebalancer_7d_df[rebalancer_7d_df['status']>=2][rebalancer_7d_df['status']<400]
- channels_df['attempts'] = channels_df.apply(lambda row: 0 if rebalancer_count_7d_df.empty else rebalancer_count_7d_df[rebalancer_count_7d_df['last_hop_pubkey']==row.remote_pubkey].shape[0], axis=1)
- channels_df['success'] = channels_df.apply(lambda row: 0 if rebalancer_count_7d_df.empty else rebalancer_count_7d_df[rebalancer_count_7d_df['last_hop_pubkey']==row.remote_pubkey][rebalancer_count_7d_df['status']==2].shape[0], axis=1)
- channels_df['success_rate'] = channels_df.apply(lambda row: 0 if row['attempts'] == 0 else int((row['success']/row['attempts'])*100), axis=1)
- enabled_df = channels_df[channels_df['auto_rebalance']==True]
- eligible_df = enabled_df[enabled_df['is_active']==True][enabled_df['inbound_can']>=1][enabled_df['fee_check']<100]
- eligible_count = eligible_df.shape[0]
- enabled_count = enabled_df.shape[0]
- available_df = channels_df[channels_df['auto_rebalance']==False][channels_df['is_active']==True][channels_df['percent_outbound'] / channels_df['ar_out_target']>=1]
- available_count = available_df.shape[0]
- else:
- eligible_count = 0
- enabled_count = 0
- available_count = 0
- try:
- query = request.GET.urlencode()[1:]
- if query == '1':
- #Filter Sink (AR Enabled)
- channels_df = channels_df[channels_df['auto_rebalance']==True][channels_df['is_active']==True]
- elif query == '2':
- #Filter Source (Eligible to rebalance out)
- channels_df = channels_df[channels_df['auto_rebalance']==False][channels_df['is_active']==True].sort_values(by=['percent_outbound'], ascending=False)
- else:
- #Proceed
- pass
- except Exception as e:
- try:
- error = str(e.code())
- except:
- error = str(e)
- return render(request, 'error.html', {'error': error})
-
context = {
- 'eligible_count': eligible_count,
- 'enabled_count': enabled_count,
- 'available_count': available_count,
- 'channels': channels_df.to_dict(orient='records'),
- 'rebalancer': Rebalancer.objects.all().annotate(ppm=Round((Sum('fee_limit')*1000000)/Sum('value'), output_field=IntegerField())).order_by('-id')[:20],
'rebalancer_form': RebalancerForm,
- 'local_settings': LocalSettings.objects.filter(key__contains='AR-').order_by('key'),
+ 'local_settings': get_local_settings('AR-'),
'network': 'testnet/' if settings.LND_NETWORK == 'testnet' else '',
'graph_links': graph_links()
}
@@ -2000,11 +1946,10 @@ def rebalance(request):
if form.is_valid():
try:
if Channels.objects.filter(is_active=True, is_open=True, remote_pubkey=form.cleaned_data['last_hop_pubkey']).exists() or form.cleaned_data['last_hop_pubkey'] == '':
- chan_ids = []
- for channel in form.cleaned_data['outgoing_chan_ids']:
- chan_ids.append(channel.chan_id)
+ chan_ids = [ch.chan_id for ch in form.cleaned_data['outgoing_chan_ids']]
if len(chan_ids) > 0:
-                    target_alias = Channels.objects.filter(is_active=True, is_open=True, remote_pubkey=form.cleaned_data['last_hop_pubkey'])[0].alias if Channels.objects.filter(is_active=True, is_open=True, remote_pubkey=form.cleaned_data['last_hop_pubkey']).exists() else ''
+                    # Fetch the target channel once; .first() returns None when no row matches,
+                    # which is reachable because last_hop_pubkey == '' is permitted above —
+                    # guard against it instead of dereferencing None.
+                    target_channel = Channels.objects.filter(is_active=True, is_open=True, remote_pubkey=form.cleaned_data['last_hop_pubkey']).first()
+                    target_alias = '' if target_channel is None else (target_channel.alias if target_channel.alias != '' else target_channel.remote_pubkey[:12])
fee_limit = round(form.cleaned_data['fee_limit']*form.cleaned_data['value']*0.000001, 3)
Rebalancer(value=form.cleaned_data['value'], fee_limit=fee_limit, outgoing_chan_ids=str(chan_ids).replace('\'', ''), last_hop_pubkey=form.cleaned_data['last_hop_pubkey'], target_alias=target_alias, duration=form.cleaned_data['duration'], manual=True).save()
messages.success(request, 'Rebalancer request created!')
@@ -2022,149 +1967,109 @@ def rebalance(request):
messages.error(request, 'Invalid Request. Please try again.')
return redirect(request.META.get('HTTP_REFERER'))
+def get_local_settings(*prefixes):
+    """Build the settings-form descriptor list for the requested key prefixes.
+
+    Each entry describes one LocalSettings key: the form field id, display
+    label, tooltip, unit and allowed range.  Defaults are overlaid with any
+    value already persisted in LocalSettings for that key.
+    """
+    form = []
+    if 'AR-' in prefixes:
+        form.append({'unit': '', 'form_id': 'update_channels', 'id': 'update_channels'})
+        form.append({'unit': '', 'form_id': 'enabled', 'value': 0, 'label': 'AR Enabled', 'id': 'AR-Enabled', 'title':'This enables or disables the auto-scheduling function', 'min':0, 'max':1})
+        form.append({'unit': '%', 'form_id': 'target_percent', 'value': 0, 'label': 'AR Target Amount', 'id': 'AR-Target%', 'title': 'The percentage of the total capacity to target as the rebalance amount', 'min':0.1, 'max':100})
+        form.append({'unit': 'min', 'form_id': 'target_time', 'value': 0, 'label': 'AR Target Time', 'id': 'AR-Time', 'title': 'The time spent per individual rebalance attempt', 'min':1, 'max':60})
+        form.append({'unit': 'ppm', 'form_id': 'fee_rate', 'value': 0, 'label': 'AR Max Fee Rate', 'id': 'AR-MaxFeeRate', 'title': 'The max rate we can ever use to refill a channel with outbound', 'min':1, 'max':2500})
+        form.append({'unit': '%', 'form_id': 'outbound_percent', 'value': 0, 'label': 'AR Target Out Above', 'id': 'AR-Outbound%', 'title': 'When a channel is not enabled for targeting; the minimum outbound a channel must have to be a source for refilling another channel', 'min':1, 'max':100})
+        form.append({'unit': '%', 'form_id': 'inbound_percent', 'value': 0, 'label': 'AR Target In Above', 'id': 'AR-Inbound%', 'title': 'When a channel is enabled for targeting; the maximum inbound a channel can have before selected for auto rebalance', 'min':1, 'max':100})
+        form.append({'unit': '%', 'form_id': 'max_cost', 'value': 0, 'label': 'AR Max Cost', 'id': 'AR-MaxCost%', 'title': 'The ppm to target which is the percentage of the outbound fee rate for the channel being refilled', 'min':1, 'max':100})
+        form.append({'unit': '%', 'form_id': 'variance', 'value': 0, 'label': 'AR Variance', 'id': 'AR-Variance', 'title': 'The percentage of the target amount to be randomly varied with every rebalance attempt', 'min':0, 'max':100})
+        form.append({'unit': 'min', 'form_id': 'wait_period', 'value': 0, 'label': 'AR Wait Period', 'id': 'AR-WaitPeriod', 'title': 'The minutes we should wait after a failed attempt before trying again', 'min':1, 'max':100})
+        form.append({'unit': '', 'form_id': 'autopilot', 'value': 0, 'label': 'Autopilot', 'id': 'AR-Autopilot', 'title': 'This enables or disables the Autopilot function which automatically acts upon suggestions on this page: /actions', 'min':0, 'max':1})
+        form.append({'unit': 'days', 'form_id': 'autopilotdays', 'value': 0, 'label': 'Autopilot Days', 'id': 'AR-APDays', 'title': 'Number of days to consider for autopilot. Default 7', 'min':0, 'max':100})
+        form.append({'unit': '', 'form_id': 'workers', 'value': 1, 'label': 'Workers', 'id': 'AR-Workers', 'title': 'Number of workers', 'min':1, 'max':12})
+    if 'AF-' in prefixes:
+        form.append({'unit': '', 'form_id': 'af_enabled', 'value': 0, 'label': 'Autofee', 'id': 'AF-Enabled', 'title': 'Enable/Disable Auto-fee functionality', 'min':0, 'max':1})
+        form.append({'unit': 'ppm', 'form_id': 'af_maxRate', 'value': 0, 'label': 'AF Max Rate', 'id': 'AF-MaxRate', 'title': 'Maximum Rate', 'min':0, 'max':5000})
+        form.append({'unit': 'ppm', 'form_id': 'af_minRate', 'value': 0, 'label': 'AF Min Rate', 'id': 'AF-MinRate', 'title': 'Minimum Rate', 'min':0, 'max':5000})
+        form.append({'unit': 'ppm', 'form_id': 'af_increment', 'value': 0, 'label': 'AF Increment', 'id': 'AF-Increment', 'title': 'Amount to increment on each interaction', 'min':0, 'max':100})
+        form.append({'unit': '%', 'form_id': 'af_multiplier', 'value': 0, 'label': 'AF Multiplier', 'id': 'AF-Multiplier', 'title': 'Multiplier to be applied to Auto-Fee', 'min':0, 'max':100})
+        form.append({'unit': '', 'form_id': 'af_failedHTLCs', 'value': 0, 'label': 'AF FailedHTLCs', 'id': 'AF-FailedHTLCs', 'title': 'Failed HTLCs', 'min':0, 'max':100})
+        form.append({'unit': 'hours', 'form_id': 'af_updateHours', 'value': 0, 'label': 'AF Update', 'id': 'AF-UpdateHours', 'title': 'Number of hours to consider to update fees. Default 24', 'min':0, 'max':100})
+    if 'GUI-' in prefixes:
+        form.append({'unit': '', 'form_id': 'gui_graphLinks', 'value': graph_links(), 'label': 'Graph URL', 'id': 'GUI-GraphLinks', 'title': 'Preferred Graph URL'})
+        form.append({'unit': '', 'form_id': 'gui_netLinks', 'value': network_links(), 'label': 'NET URL', 'id': 'GUI-NetLinks', 'title': 'Preferred NET URL'})
+    if 'LND-' in prefixes:
+        form.append({'unit': '', 'form_id': 'lnd_cleanPayments', 'value': 0, 'label': 'LND Clean Payments', 'id': 'LND-CleanPayments', 'title': 'Clean LND Payments', 'min':0, 'max':1})
+        form.append({'unit': 'days', 'form_id': 'lnd_retentionDays', 'value': 30, 'label': 'LND Retention', 'id': 'LND-RetentionDays', 'title': 'LND Retention days'})
+
+    # Overlay persisted values: one query per prefix, merged into a dict so
+    # each form entry is resolved with an O(1) lookup instead of rescanning
+    # the result set per field (keys are unique in LocalSettings).
+    saved = {}
+    for prefix in prefixes:
+        for sett in LocalSettings.objects.filter(key__contains=prefix).values('key', 'value').order_by('key'):
+            saved[sett['key']] = sett['value']
+    for field in form:
+        if field['id'] in saved:
+            field['value'] = saved[field['id']]
+    return form
+
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
-def auto_rebalance(request):
+def update_settings(request):
if request.method == 'POST':
- form = AutoRebalanceForm(request.POST)
- if form.is_valid():
- if form.cleaned_data['chan_id'] is not None:
- target_chan_id = form.cleaned_data['chan_id']
- target_channel = Channels.objects.filter(chan_id=target_chan_id)
- if len(target_channel) == 1:
- target_channel = target_channel[0]
- target_channel.auto_rebalance = True if target_channel.auto_rebalance == False else False
- target_channel.save()
- messages.success(request, 'Updated auto rebalancer status for: ' + str(target_channel.chan_id))
- else:
- messages.error(request, 'Failed to update auto rebalancer status of channel: ' + str(target_chan_id))
- if form.cleaned_data['target_percent'] is not None:
- target_percent = float(form.cleaned_data['target_percent'])
- try:
- db_percent_target = LocalSettings.objects.get(key='AR-Target%')
- except:
- LocalSettings(key='AR-Target%', value='5').save()
- db_percent_target = LocalSettings.objects.get(key='AR-Target%')
- db_percent_target.value = target_percent
- db_percent_target.save()
- if form.cleaned_data['targetallchannels']:
- Channels.objects.all().update(ar_amt_target=Round(F('capacity')*(target_percent/100), output_field=IntegerField()))
- messages.success(request, 'Updated auto rebalancer target amount for all channels to: ' + str(target_percent))
- else:
- messages.success(request, 'Updated auto rebalancer target amount in local settings: ' + str(target_percent))
- if form.cleaned_data['target_time'] is not None:
- target_time = form.cleaned_data['target_time']
- try:
- db_time_target = LocalSettings.objects.get(key='AR-Time')
- except:
- LocalSettings(key='AR-Time', value='5').save()
- db_time_target = LocalSettings.objects.get(key='AR-Time')
- db_time_target.value = target_time
- db_time_target.save()
- messages.success(request, 'Updated auto rebalancer target time setting to: ' + str(target_time))
- if form.cleaned_data['enabled'] is not None:
- enabled = form.cleaned_data['enabled']
- try:
- db_enabled = LocalSettings.objects.get(key='AR-Enabled')
- except:
- LocalSettings(key='AR-Enabled', value='0').save()
- db_enabled = LocalSettings.objects.get(key='AR-Enabled')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated auto rebalancer enabled setting to: ' + str(enabled))
- if form.cleaned_data['outbound_percent'] is not None:
- outbound_percent = int(form.cleaned_data['outbound_percent'])
- try:
- db_outbound_target = LocalSettings.objects.get(key='AR-Outbound%')
- except:
- LocalSettings(key='AR-Outbound%', value='75').save()
- db_outbound_target = LocalSettings.objects.get(key='AR-Outbound%')
- db_outbound_target.value = outbound_percent
- db_outbound_target.save()
- if form.cleaned_data['targetallchannels']:
- Channels.objects.all().update(ar_out_target=int(outbound_percent))
- messages.success(request, 'Updated auto rebalancer target outbound percent setting for all channels to: ' + str(outbound_percent))
- else:
- messages.success(request, 'Updated auto rebalancer target outbound percent setting in local settings to: ' + str(outbound_percent))
- if form.cleaned_data['inbound_percent'] is not None:
- inbound_percent = int(form.cleaned_data['inbound_percent'])
- try:
- db_inbound_target = LocalSettings.objects.get(key='AR-Inbound%')
- except:
- LocalSettings(key='AR-Inbound%', value='100').save()
- db_inbound_target = LocalSettings.objects.get(key='AR-Inbound%')
- db_inbound_target.value = inbound_percent
- db_inbound_target.save()
- if form.cleaned_data['targetallchannels']:
- Channels.objects.all().update(ar_in_target=int(inbound_percent))
- messages.success(request, 'Updated auto rebalancer target inbound percent setting for all channels to: ' + str(inbound_percent))
- else:
- messages.success(request, 'Updated auto rebalancer target inbound percent setting in local settigs to: ' + str(inbound_percent))
- if form.cleaned_data['fee_rate'] is not None:
- fee_rate = form.cleaned_data['fee_rate']
- try:
- db_fee_rate = LocalSettings.objects.get(key='AR-MaxFeeRate')
- except:
- LocalSettings(key='AR-MaxFeeRate', value='100').save()
- db_fee_rate = LocalSettings.objects.get(key='AR-MaxFeeRate')
- db_fee_rate.value = fee_rate
- db_fee_rate.save()
- messages.success(request, 'Updated auto rebalancer max fee rate setting to: ' + str(fee_rate))
- if form.cleaned_data['max_cost'] is not None:
- max_cost = int(form.cleaned_data['max_cost'])
- try:
- db_max_cost = LocalSettings.objects.get(key='AR-MaxCost%')
- except:
- LocalSettings(key='AR-MaxCost%', value='65').save()
- db_max_cost = LocalSettings.objects.get(key='AR-MaxCost%')
- db_max_cost.value = max_cost
- db_max_cost.save()
- if form.cleaned_data['targetallchannels']:
- Channels.objects.all().update(ar_max_cost=int(max_cost))
- messages.success(request, 'Updated auto rebalancer max cost setting for all channels to: ' + str(max_cost))
- else:
- messages.success(request, 'Updated auto rebalancer max cost setting in local settings to: ' + str(max_cost))
- if form.cleaned_data['autopilot'] is not None:
- autopilot = form.cleaned_data['autopilot']
- try:
- db_autopilot = LocalSettings.objects.get(key='AR-Autopilot')
- except:
- LocalSettings(key='AR-Autopilot', value='0').save()
- db_autopilot = LocalSettings.objects.get(key='AR-Autopilot')
- db_autopilot.value = autopilot
- db_autopilot.save()
- messages.success(request, 'Updated autopilot setting to: ' + str(autopilot))
- if form.cleaned_data['autopilotdays'] is not None:
- autopilotdays = form.cleaned_data['autopilotdays']
- try:
- db_autopilotdays = LocalSettings.objects.get(key='AR-APDays')
- except:
- LocalSettings(key='AR-APDays', value='7').save()
- db_autopilotdays = LocalSettings.objects.get(key='AR-APDays')
- db_autopilotdays.value = autopilotdays
- db_autopilotdays.save()
- messages.success(request, 'Updated autopilot days setting to: ' + str(autopilotdays))
- if form.cleaned_data['variance'] is not None:
- variance = form.cleaned_data['variance']
- try:
- db_variance = LocalSettings.objects.get(key='AR-Variance')
- except:
- LocalSettings(key='AR-Variance', value='0').save()
- db_variance = LocalSettings.objects.get(key='AR-Variance')
- db_variance.value = variance
- db_variance.save()
- messages.success(request, 'Updated variance setting to: ' + str(variance))
- if form.cleaned_data['wait_period'] is not None:
- wait_period = form.cleaned_data['wait_period']
- try:
- db_wait_period = LocalSettings.objects.get(key='AR-WaitPeriod')
- except:
- LocalSettings(key='AR-WaitPeriod', value='30').save()
- db_wait_period = LocalSettings.objects.get(key='AR-WaitPeriod')
- db_wait_period.value = wait_period
- db_wait_period.save()
- messages.success(request, 'Updated wait period setting to: ' + str(wait_period))
- else:
+        # Single source of truth for every supported LocalSettings key:
+        #   form_id -> LocalSettingsForm field name
+        #   value   -> default persisted when the key does not exist yet
+        #   parse   -> normalizer applied to the submitted value
+        #   id      -> LocalSettings key
+        template = [{'form_id': 'enabled', 'value': 0, 'parse': lambda x: x,'id': 'AR-Enabled'},
+                    {'form_id': 'target_percent', 'value': 5, 'parse': lambda x: float(x),'id': 'AR-Target%'},
+                    {'form_id': 'target_time', 'value': 5, 'parse': lambda x: x,'id': 'AR-Time'},
+                    {'form_id': 'fee_rate', 'value': 100, 'parse': lambda x: x,'id': 'AR-MaxFeeRate'},
+                    {'form_id': 'outbound_percent', 'value': 75, 'parse': lambda x: int(x),'id': 'AR-Outbound%'},
+                    {'form_id': 'inbound_percent', 'value': 100, 'parse': lambda x: int(x),'id': 'AR-Inbound%'},
+                    {'form_id': 'max_cost', 'value': 65, 'parse': lambda x: int(x),'id': 'AR-MaxCost%'},
+                    {'form_id': 'variance', 'value': 0, 'parse': lambda x: x,'id': 'AR-Variance'},
+                    {'form_id': 'wait_period', 'value': 30, 'parse': lambda x: x,'id': 'AR-WaitPeriod'},
+                    {'form_id': 'autopilot', 'value': 0, 'parse': lambda x: x,'id': 'AR-Autopilot'},
+                    {'form_id': 'autopilotdays', 'value': 7, 'parse': lambda x: x,'id': 'AR-APDays'},
+                    {'form_id': 'workers', 'value': 5, 'parse': lambda x: x,'id': 'AR-Workers'},
+                    # Auto-Fee (AF-*) settings
+                    {'form_id': 'af_enabled', 'value': 0, 'parse': lambda x: int(x),'id': 'AF-Enabled'},
+                    {'form_id': 'af_maxRate', 'value': 2500, 'parse': lambda x: int(x),'id': 'AF-MaxRate'},
+                    {'form_id': 'af_minRate', 'value': 0, 'parse': lambda x: int(x),'id': 'AF-MinRate'},
+                    {'form_id': 'af_increment', 'value': 5, 'parse': lambda x: int(x),'id': 'AF-Increment'},
+                    {'form_id': 'af_multiplier', 'value': 5, 'parse': lambda x: int(x),'id': 'AF-Multiplier'},
+                    {'form_id': 'af_failedHTLCs', 'value': 25, 'parse': lambda x: int(x),'id': 'AF-FailedHTLCs'},
+                    {'form_id': 'af_updateHours', 'value': 24, 'parse': lambda x: int(x),'id': 'AF-UpdateHours'},
+                    # GUI (GUI-*) settings
+                    {'form_id': 'gui_graphLinks', 'value': '0', 'parse': lambda x: x,'id': 'GUI-GraphLinks'},
+                    {'form_id': 'gui_netLinks', 'value': '0', 'parse': lambda x: x,'id': 'GUI-NetLinks'},
+                    # LND maintenance (LND-*) settings
+                    {'form_id': 'lnd_cleanPayments', 'value': '0', 'parse': lambda x: x, 'id': 'LND-CleanPayments'},
+                    {'form_id': 'lnd_retentionDays', 'value': '0', 'parse': lambda x: x, 'id': 'LND-RetentionDays'},
+                    ]
+
+        form = LocalSettingsForm(request.POST)
+        if not form.is_valid():
             messages.error(request, 'Invalid Request. Please try again.')
+        else:
+            update_channels = form.cleaned_data['update_channels']
+            for field in template:
+                # Fields absent from the submission come back as None and are skipped.
+                value = form.cleaned_data[field['form_id']]
+                if value is not None:
+                    value = field['parse'](value)
+                    try:
+                        db_value = LocalSettings.objects.get(key=field['id'])
+                    # NOTE(review): bare except assumed to mean LocalSettings.DoesNotExist
+                    # (matches the style used throughout this file) — seed the default, then re-fetch.
+                    except:
+                        LocalSettings(key=field['id'], value=field['value']).save()
+                        db_value = LocalSettings.objects.get(key=field['id'])
+                    # Skip no-op writes and empty submissions; note this also skips the
+                    # per-channel fan-out below when the stored value is unchanged.
+                    if db_value.value == str(value) or len(str(value)) == 0:
+                        continue
+                    db_value.value = value
+                    db_value.save()
+
+                    # Optionally push the new AR percentage targets down to every channel row.
+                    if update_channels and field['id'] in ['AR-Target%', 'AR-Outbound%','AR-Inbound%','AR-MaxCost%']:
+                        if field['id'] == 'AR-Target%':
+                            Channels.objects.all().update(ar_amt_target=Round(F('capacity')*(value/100), output_field=IntegerField()))
+                        elif field['id'] == 'AR-Outbound%':
+                            Channels.objects.all().update(ar_out_target=value)
+                        elif field['id'] == 'AR-Inbound%':
+                            Channels.objects.all().update(ar_in_target=value)
+                        elif field['id'] == 'AR-MaxCost%':
+                            Channels.objects.all().update(ar_max_cost=value)
+                        messages.success(request, 'All channels ' + field['id'] + ' updated to: ' + str(value))
+                    else:
+                        messages.success(request, field['id'] + ' updated to: ' + str(value))
+
    return redirect(request.META.get('HTTP_REFERER'))
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
@@ -2175,23 +2080,17 @@ def update_channel(request):
chan_id = form.cleaned_data['chan_id']
target = form.cleaned_data['target']
update_target = int(form.cleaned_data['update_target'])
- db_channel = Channels.objects.filter(chan_id=chan_id)[0]
+ db_channel = Channels.objects.get(chan_id=chan_id)
if update_target == 0:
stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=target, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=db_channel.local_cltv))
db_channel.local_base_fee = target
db_channel.save()
messages.success(request, 'Base fee for channel ' + str(db_channel.alias) + ' (' + str(db_channel.chan_id) + ') updated to a value of: ' + str(target))
elif update_target == 1:
stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(target/1000000), time_lock_delta=db_channel.local_cltv))
old_fee_rate = db_channel.local_fee_rate
db_channel.local_fee_rate = target
@@ -2221,10 +2120,7 @@ def update_channel(request):
messages.success(request, 'Auto rebalancer max cost for channel ' + str(db_channel.alias) + ' (' + str(db_channel.chan_id) + ') updated to a value of: ' + str(target) + '%')
elif update_target == 7:
stub = lnrouter.RouterStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChanStatus(lnr.UpdateChanStatusRequest(chan_point=channel_point, action=0)) if target == 1 else stub.UpdateChanStatus(lnr.UpdateChanStatusRequest(chan_point=channel_point, action=1))
db_channel.local_disabled = False if target == 1 else True
db_channel.save()
@@ -2237,30 +2133,21 @@ def update_channel(request):
messages.success(request, 'Auto fees status for channel ' + str(db_channel.alias) + ' (' + str(db_channel.chan_id) + ') updated to a value of: ' + str(db_channel.auto_fees))
elif update_target == 9:
stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=target))
db_channel.local_cltv = target
db_channel.save()
messages.success(request, 'CLTV for channel ' + str(db_channel.alias) + ' (' + str(db_channel.chan_id) + ') updated to a value of: ' + str(float(target)))
elif update_target == 10:
stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=db_channel.local_cltv, min_htlc_msat_specified=True, min_htlc_msat=int(target*1000)))
db_channel.local_min_htlc_msat = int(target*1000)
db_channel.save()
messages.success(request, 'Min HTLC for channel ' + str(db_channel.alias) + ' (' + str(db_channel.chan_id) + ') updated to a value of: ' + str(float(target)))
elif update_target == 11:
stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=db_channel.local_cltv, max_htlc_msat=int(target*1000)))
db_channel.local_max_htlc_msat = int(target*1000)
db_channel.save()
@@ -2328,6 +2215,13 @@ def update_pending(request):
messages.error(request, 'Invalid Request. Please try again.')
return redirect(request.META.get('HTTP_REFERER'))
+def point(ch: Channels):
+    """Build an ln.ChannelPoint (funding txid + output index) for *ch*.
+
+    Shared helper for the UpdateChannelPolicy/UpdateChanStatus call sites,
+    replacing the previously copy-pasted four-line construction.  Sets both
+    funding_txid_bytes and funding_txid_str exactly as the inline code did;
+    NOTE(review): bytes.fromhex applies no byte-order reversal — confirm LND
+    accepts this form (the _str field is the display-order hex).
+    """
+    channel_point = ln.ChannelPoint()
+    channel_point.funding_txid_bytes = bytes.fromhex(ch.funding_txid)
+    channel_point.funding_txid_str = ch.funding_txid
+    channel_point.output_index = ch.output_index
+    return channel_point
+
@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
def update_setting(request):
if request.method == 'POST':
@@ -2335,175 +2229,12 @@ def update_setting(request):
if form.is_valid():
key = form.cleaned_data['key']
value = form.cleaned_data['value']
- if key == 'AR-Target%':
- target_percent = float(value)
- try:
- db_percent_target = LocalSettings.objects.get(key='AR-Target%')
- except:
- LocalSettings(key='AR-Target%', value='5').save()
- db_percent_target = LocalSettings.objects.get(key='AR-Target%')
- db_percent_target.value = target_percent
- db_percent_target.save()
- messages.success(request, 'Updated auto rebalancer target amount to: ' + str(target_percent))
- elif key == 'AR-Time':
- target_time = int(value)
- try:
- db_time_target = LocalSettings.objects.get(key='AR-Time')
- except:
- LocalSettings(key='AR-Time', value='5').save()
- db_time_target = LocalSettings.objects.get(key='AR-Time')
- db_time_target.value = target_time
- db_time_target.save()
- messages.success(request, 'Updated auto rebalancer target time setting to: ' + str(target_time))
- elif key == 'AR-Workers':
- workers = int(value)
- try:
- db_workers = LocalSettings.objects.get(key='AR-Workers')
- except:
- LocalSettings(key='AR-Workers', value='5').save()
- db_workers = LocalSettings.objects.get(key='AR-Workers')
- db_workers.value = workers
- db_workers.save()
- messages.success(request, 'Updated auto rebalancer workers setting to: ' + str(workers))
- elif key == 'AR-Enabled':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AR-Enabled')
- except:
- LocalSettings(key='AR-Enabled', value='0').save()
- db_enabled = LocalSettings.objects.get(key='AR-Enabled')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated auto rebalancer enabled setting to: ' + str(enabled))
- elif key == 'AR-Outbound%':
- outbound_percent = int(value)
- try:
- db_outbound_target = LocalSettings.objects.get(key='AR-Outbound%')
- except:
- LocalSettings(key='AR-Outbound%', value='75').save()
- db_outbound_target = LocalSettings.objects.get(key='AR-Outbound%')
- db_outbound_target.value = outbound_percent
- db_outbound_target.save()
- messages.success(request, 'Updated auto rebalancer target outbound percent setting: ' + str(outbound_percent))
- elif key == 'AR-Inbound%':
- inbound_percent = int(value)
- try:
- db_inbound_target = LocalSettings.objects.get(key='AR-Inbound%')
- except:
- LocalSettings(key='AR-Inbound%', value='100').save()
- db_inbound_target = LocalSettings.objects.get(key='AR-Inbound%')
- db_inbound_target.value = inbound_percent
- db_inbound_target.save()
- messages.success(request, 'Updated auto rebalancer target inbound percent setting: ' + str(inbound_percent))
- elif key == 'AR-MaxFeeRate':
- fee_rate = int(value)
- try:
- db_fee_rate = LocalSettings.objects.get(key='AR-MaxFeeRate')
- except:
- LocalSettings(key='AR-MaxFeeRate', value='100').save()
- db_fee_rate = LocalSettings.objects.get(key='AR-MaxFeeRate')
- db_fee_rate.value = fee_rate
- db_fee_rate.save()
- messages.success(request, 'Updated auto rebalancer max fee rate setting to: ' + str(fee_rate))
- elif key == 'AR-MaxCost%':
- max_cost = int(value)
- try:
- db_max_cost = LocalSettings.objects.get(key='AR-MaxCost%')
- except:
- LocalSettings(key='AR-MaxCost%', value='65').save()
- db_max_cost = LocalSettings.objects.get(key='AR-MaxCost%')
- db_max_cost.value = max_cost
- db_max_cost.save()
- messages.success(request, 'Updated auto rebalancer max cost setting to: ' + str(max_cost))
- elif key == 'AR-Autopilot':
- autopilot = int(value)
- try:
- db_autopilot = LocalSettings.objects.get(key='AR-Autopilot')
- except:
- LocalSettings(key='AR-Autopilot', value='0').save()
- db_autopilot = LocalSettings.objects.get(key='AR-Autopilot')
- db_autopilot.value = autopilot
- db_autopilot.save()
- messages.success(request, 'Updated autopilot setting to: ' + str(autopilot))
- elif key == 'AR-APDays':
- apdays = int(value)
- try:
- db_apdays = LocalSettings.objects.get(key='AR-APDays')
- except:
- LocalSettings(key='AR-APDays', value='7').save()
- db_apdays = LocalSettings.objects.get(key='AR-APDays')
- db_apdays.value = apdays
- db_apdays.save()
- messages.success(request, 'Updated Autopilot Days setting to: ' + str(apdays))
- elif key == 'AR-Variance':
- variance = int(value)
- try:
- db_variance = LocalSettings.objects.get(key='AR-Variance')
- except:
- LocalSettings(key='AR-Variance', value='0').save()
- db_variance = LocalSettings.objects.get(key='AR-Variance')
- db_variance.value = variance
- db_variance.save()
- messages.success(request, 'Updated variance setting to: ' + str(variance))
- elif key == 'AR-WaitPeriod':
- wait_period = int(value)
- try:
- db_wait_period = LocalSettings.objects.get(key='AR-WaitPeriod')
- except:
- LocalSettings(key='AR-WaitPeriod', value='0').save()
- db_wait_period = LocalSettings.objects.get(key='AR-WaitPeriod')
- db_wait_period.value = wait_period
- db_wait_period.save()
- messages.success(request, 'Updated wait period setting to: ' + str(wait_period))
- elif key == 'GUI-GraphLinks':
- links = str(value)
- try:
- db_links = LocalSettings.objects.get(key='GUI-GraphLinks')
- except:
- LocalSettings(key='GUI-GraphLinks', value='0').save()
- db_links = LocalSettings.objects.get(key='GUI-GraphLinks')
- db_links.value = links
- db_links.save()
- messages.success(request, 'Updated graph links to use: ' + str(links))
- elif key == 'GUI-NetLinks':
- links = str(value)
- try:
- db_links = LocalSettings.objects.get(key='GUI-NetLinks')
- except:
- LocalSettings(key='GUI-NetLinks', value='0').save()
- db_links = LocalSettings.objects.get(key='GUI-NetLinks')
- db_links.value = links
- db_links.save()
- messages.success(request, 'Updated network links to use: ' + str(links))
- elif key == 'LND-CleanPayments':
- clean_payments = int(value)
- try:
- db_clean_payments = LocalSettings.objects.get(key='LND-CleanPayments')
- except:
- LocalSettings(key='LND-CleanPayments', value='0').save()
- db_clean_payments = LocalSettings.objects.get(key='LND-CleanPayments')
- db_clean_payments.value = clean_payments
- db_clean_payments.save()
- messages.success(request, 'Updated auto payment cleanup setting to: ' + str(clean_payments))
- elif key == 'LND-RetentionDays':
- retention_days = int(value)
- try:
- db_retention_days = LocalSettings.objects.get(key='LND-RetentionDays')
- except:
- LocalSettings(key='LND-RetentionDays', value='0').save()
- db_retention_days = LocalSettings.objects.get(key='LND-RetentionDays')
- db_retention_days.value = retention_days
- db_retention_days.save()
- messages.success(request, 'Updated payment cleanup retention days to: ' + str(retention_days))
- elif key == 'ALL-oRate':
+ if key == 'ALL-oRate':
target = int(value)
stub = lnrpc.LightningStub(lnd_connect())
channels = Channels.objects.filter(is_open=True)
for db_channel in channels:
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(target/1000000), time_lock_delta=db_channel.local_cltv))
old_fee_rate = db_channel.local_fee_rate
db_channel.local_fee_rate = target
@@ -2516,11 +2247,7 @@ def update_setting(request):
stub = lnrpc.LightningStub(lnd_connect())
channels = Channels.objects.filter(is_open=True)
for db_channel in channels:
- stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=target, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=db_channel.local_cltv))
db_channel.local_base_fee = target
db_channel.save()
@@ -2530,11 +2257,7 @@ def update_setting(request):
stub = lnrpc.LightningStub(lnd_connect())
channels = Channels.objects.filter(is_open=True)
for db_channel in channels:
- stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=target))
db_channel.local_cltv = target
db_channel.save()
@@ -2544,11 +2267,7 @@ def update_setting(request):
stub = lnrpc.LightningStub(lnd_connect())
channels = Channels.objects.filter(is_open=True)
for db_channel in channels:
- stub = lnrpc.LightningStub(lnd_connect())
- channel_point = ln.ChannelPoint()
- channel_point.funding_txid_bytes = bytes.fromhex(db_channel.funding_txid)
- channel_point.funding_txid_str = db_channel.funding_txid
- channel_point.output_index = db_channel.output_index
+ channel_point = point(db_channel)
stub.UpdateChannelPolicy(ln.PolicyUpdateRequest(chan_point=channel_point, base_fee_msat=db_channel.local_base_fee, fee_rate=(db_channel.local_fee_rate/1000000), time_lock_delta=db_channel.local_cltv, min_htlc_msat_specified=True, min_htlc_msat=target))
db_channel.local_min_htlc_msat = target
db_channel.save()
@@ -2577,67 +2296,6 @@ def update_setting(request):
target = int(value)
channels = Channels.objects.filter(is_open=True, private=False).update(auto_fees=target)
messages.success(request, 'Auto Fees setting for all channels updated to a value of: ' + str(target))
- elif key == 'AF-Enabled':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AF-Enabled')
- except:
- LocalSettings(key='AF-Enabled', value='0').save()
- db_enabled = LocalSettings.objects.get(key='AF-Enabled')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated autofees enabled setting to: ' + str(enabled))
- elif key == 'AF-MaxRate':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AF-MaxRate')
- except:
- LocalSettings(key='AF-MaxRate', value='2500').save()
- db_enabled = LocalSettings.objects.get(key='AF-MaxRate')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated autofees max rate setting to: ' + str(enabled))
- elif key == 'AF-MinRate':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AF-MinRate')
- except:
- LocalSettings(key='AF-MinRate', value='0').save()
- db_enabled = LocalSettings.objects.get(key='AF-MinRate')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated autofees min rate setting to: ' + str(enabled))
- elif key == 'AF-Increment':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AF-Increment')
- except:
- LocalSettings(key='AF-Increment', value='5').save()
- db_enabled = LocalSettings.objects.get(key='AF-Increment')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated autofees fee increment setting to: ' + str(enabled))
- elif key == 'AF-Multiplier':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AF-Multiplier')
- except:
- LocalSettings(key='AF-Multiplier', value='5').save()
- db_enabled = LocalSettings.objects.get(key='AF-Multiplier')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated autofees fee multiplier setting to: ' + str(enabled))
- elif key == 'AF-FailedHTLCs':
- enabled = int(value)
- try:
- db_enabled = LocalSettings.objects.get(key='AF-FailedHTLCs')
- except:
- LocalSettings(key='AF-FailedHTLCs', value='25').save()
- db_enabled = LocalSettings.objects.get(key='AF-FailedHTLCs')
- db_enabled.value = enabled
- db_enabled.save()
- messages.success(request, 'Updated autofees daily failed HTLC trigger limit setting to: ' + str(enabled))
- elif key == 'AF-UpdateHours':
enabled = int(value)
try:
db_enabled = LocalSettings.objects.get(key='AF-UpdateHours')
@@ -2730,10 +2388,23 @@ def get_fees(request):
return redirect(request.META.get('HTTP_REFERER'))
return redirect(request.META.get('HTTP_REFERER'))
+@is_login_required(login_required(login_url='/lndg-admin/login/?next=/'), settings.LOGIN_REQUIRED)
+def sign_message(request):
+ if request.method == 'POST':
+ msg = request.POST.get("msg")
+ stub = lnrpc.LightningStub(lnd_connect())
+ req = ln.SignMessageRequest(msg=msg.encode('utf-8'), single_hash=False)
+ response = stub.SignMessage(req)
+ messages.success(request, "Signed message: " + str(response.signature))
+ else:
+ messages.error(request, 'Invalid Request. Please try again.')
+ return redirect(request.META.get('HTTP_REFERER'))
+
class PaymentsViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = [IsAuthenticated] if settings.LOGIN_REQUIRED else []
queryset = Payments.objects.all()
serializer_class = PaymentSerializer
+ filterset_fields = ['status']
class PaymentHopsViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = [IsAuthenticated] if settings.LOGIN_REQUIRED else []
@@ -2744,6 +2415,7 @@ class InvoicesViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = [IsAuthenticated] if settings.LOGIN_REQUIRED else []
queryset = Invoices.objects.all()
serializer_class = InvoiceSerializer
+ filterset_fields = ['state']
def update(self, request, pk=None):
setting = get_object_or_404(Invoices.objects.all(), pk=pk)
@@ -2807,6 +2479,7 @@ class ChannelsViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = [IsAuthenticated] if settings.LOGIN_REQUIRED else []
queryset = Channels.objects.all()
serializer_class = ChannelSerializer
+ filterset_fields = ['is_open', 'private', 'is_active', 'auto_rebalance']
def update(self, request, pk=None):
channel = get_object_or_404(Channels.objects.all(), pk=pk)
@@ -2814,21 +2487,29 @@ def update(self, request, pk=None):
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
- else:
- return Response(serializer.errors)
+ return Response(serializer.errors)
class RebalancerViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = [IsAuthenticated] if settings.LOGIN_REQUIRED else []
- queryset = Rebalancer.objects.all()
+ queryset = Rebalancer.objects.all().order_by('-id')
serializer_class = RebalancerSerializer
-
+ filterset_fields = {'status':['lt','gt','exact'], 'payment_hash':['exact'], 'stop':['gt']}
+
def create(self, request):
- serializer = RebalancerSerializer(data=request.data)
+ serializer = self.get_serializer(data=request.data, context={'request': request})
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
- else:
- return Response(serializer.errors)
+ return Response(serializer.errors)
+
+ def update(self, request, pk):
+ rebalance = get_object_or_404(Rebalancer.objects.all(), pk=pk)
+ serializer = RebalancerSerializer(rebalance, data=request.data, context={'request': request}, partial=True)
+ if serializer.is_valid():
+ rebalance.stop = datetime.now()
+ serializer.save()
+ return Response(serializer.data)
+ return Response(serializer.errors)
@api_view(['POST'])
@is_login_required(permission_classes([IsAuthenticated]), settings.LOGIN_REQUIRED)
@@ -2858,6 +2539,17 @@ def connect_peer(request):
else:
return Response({'error': 'Invalid request!'})
+@api_view(['GET'])
+@is_login_required(permission_classes([IsAuthenticated]), settings.LOGIN_REQUIRED)
+def rebalance_stats(request):
+ try:
+ filter_7day = datetime.now() - timedelta(days=7)
+ rebalances = Rebalancer.objects.filter(stop__gt=filter_7day).values('last_hop_pubkey').annotate(attempts=Count('last_hop_pubkey'), successes=Sum(Case(When(status=2, then=1), output_field=IntegerField())))
+ return Response(rebalances)
+ except Exception as e:
+ error = str(e)
+ return Response({'error': 'Unable to fetch stats! Error: ' + error})
+
@api_view(['POST'])
@is_login_required(permission_classes([IsAuthenticated]), settings.LOGIN_REQUIRED)
def open_channel(request):
@@ -3144,3 +2836,28 @@ def pending_channels(request):
debug_error_index = error.find('debug_error_string =') - 3
error_msg = error[details_index:debug_error_index]
return Response({'error': 'Failed to get pending channels! Error: ' + error_msg})
+
+@api_view(['POST'])
+@is_login_required(permission_classes([IsAuthenticated]), settings.LOGIN_REQUIRED)
+def bump_fee(request):
+ serializer = BumpFeeSerializer(data=request.data)
+ if serializer.is_valid():
+ txid = serializer.validated_data['txid']
+ index = serializer.validated_data['index']
+ target_fee = serializer.validated_data['target_fee']
+ force = serializer.validated_data['force']
+ try:
+ target_outpoint = ln.OutPoint()
+ target_outpoint.txid_str = txid
+ target_outpoint.output_index = index
+ stub = walletstub.WalletKitStub(lnd_connect())
+ stub.BumpFee(walletrpc.BumpFeeRequest(outpoint=target_outpoint, sat_per_vbyte=target_fee, force=force))
+ return Response({'message': f'Fee bumped to {target_fee} sats/vbyte for outpoint: {txid}:{index}'})
+ except Exception as e:
+ error = str(e)
+ details_index = error.find('details =') + 11
+ debug_error_index = error.find('debug_error_string =') - 3
+ error_msg = error[details_index:debug_error_index]
+ return Response({'error': f'Fee bump failed! Error: {error_msg}'})
+ else:
+ return Response({'error': 'Invalid request!'})
\ No newline at end of file
diff --git a/htlc_stream.py b/htlc_stream.py
index 525d6b9b..fc9978ef 100644
--- a/htlc_stream.py
+++ b/htlc_stream.py
@@ -12,6 +12,7 @@ def main():
try:
connection = lnd_connect()
routerstub = lnrouter.RouterStub(connection)
+ all_forwards = {}
for response in routerstub.SubscribeHtlcEvents(lnr.SubscribeHtlcEventsRequest()):
if response.event_type == 3 and str(response.link_fail_event) != '':
in_chan_id = response.incoming_channel_id
@@ -20,13 +21,40 @@ def main():
out_chan = Channels.objects.filter(chan_id=out_chan_id)[0] if Channels.objects.filter(chan_id=out_chan_id).exists() else None
in_chan_alias = in_chan.alias if in_chan is not None else None
out_chan_alias = out_chan.alias if out_chan is not None else None
- out_chan_liq = out_chan.local_balance if out_chan is not None else None
+ out_chan_liq = (out_chan.local_balance - out_chan.local_chan_reserve) if out_chan is not None else None
out_chan_pending = out_chan.pending_outbound if out_chan is not None else None
amount = int(response.link_fail_event.info.outgoing_amt_msat/1000)
wire_failure = response.link_fail_event.wire_failure
failure_detail = response.link_fail_event.failure_detail
missed_fee = (response.link_fail_event.info.incoming_amt_msat - response.link_fail_event.info.outgoing_amt_msat)/1000
FailedHTLCs(amount=amount, chan_id_in=in_chan_id, chan_id_out=out_chan_id, chan_in_alias=in_chan_alias, chan_out_alias=out_chan_alias, chan_out_liq=out_chan_liq, chan_out_pending=out_chan_pending, wire_failure=wire_failure, failure_detail=failure_detail, missed_fee=missed_fee).save()
+ elif response.event_type == 3 and str(response.forward_event) != '':
+                # Track this forward_event, keyed by channel/HTLC ids, until it settles or fails
+ key = str(response.incoming_channel_id) + str(response.outgoing_channel_id) + str(response.incoming_htlc_id) + str(response.outgoing_htlc_id)
+ all_forwards[key] = response.forward_event
+ elif response.event_type == 3 and str(response.settle_event) != '':
+                # HTLC settled: stop tracking its forward_event
+ key = str(response.incoming_channel_id) + str(response.outgoing_channel_id) + str(response.incoming_htlc_id) + str(response.outgoing_htlc_id)
+ if key in all_forwards.keys():
+ del all_forwards[key]
+ elif response.event_type == 3 and str(response.forward_fail_event) == '':
+ key = str(response.incoming_channel_id) + str(response.outgoing_channel_id) + str(response.incoming_htlc_id) + str(response.outgoing_htlc_id)
+ if key in all_forwards.keys():
+ forward_event = all_forwards[key]
+ in_chan_id = response.incoming_channel_id
+ out_chan_id = response.outgoing_channel_id
+ in_chan = Channels.objects.filter(chan_id=in_chan_id)[0] if Channels.objects.filter(chan_id=in_chan_id).exists() else None
+ out_chan = Channels.objects.filter(chan_id=out_chan_id)[0] if Channels.objects.filter(chan_id=out_chan_id).exists() else None
+ in_chan_alias = in_chan.alias if in_chan is not None else None
+ out_chan_alias = out_chan.alias if out_chan is not None else None
+ out_chan_liq = (out_chan.local_balance - out_chan.local_chan_reserve) if out_chan is not None else None
+ out_chan_pending = out_chan.pending_outbound if out_chan is not None else None
+ amount = int(forward_event.info.incoming_amt_msat/1000)
+ wire_failure = 99
+ failure_detail = 99
+ missed_fee = (forward_event.info.incoming_amt_msat - forward_event.info.outgoing_amt_msat)/1000
+ FailedHTLCs(amount=amount, chan_id_in=in_chan_id, chan_id_out=out_chan_id, chan_in_alias=in_chan_alias, chan_out_alias=out_chan_alias, chan_out_liq=out_chan_liq, chan_out_pending=out_chan_pending, wire_failure=wire_failure, failure_detail=failure_detail, missed_fee=missed_fee).save()
+ del all_forwards[key]
except Exception as e:
print('Error while running failed HTLC stream: ' + str(e))
sleep(20)
diff --git a/initialize.py b/initialize.py
index 1a2adc6f..54dbacc6 100644
--- a/initialize.py
+++ b/initialize.py
@@ -75,6 +75,7 @@ def write_settings(node_ip, lnd_tls_path, lnd_macaroon_path, lnd_database_path,
'django.contrib.humanize',
'gui',
'rest_framework',
+ 'django_filters',
'qr_code',
]
@@ -148,6 +149,7 @@ def write_settings(node_ip, lnd_tls_path, lnd_macaroon_path, lnd_database_path,
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
],
+ 'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
}
# Internationalization
diff --git a/jobs.py b/jobs.py
index 5066dd9f..ed48b8a4 100644
--- a/jobs.py
+++ b/jobs.py
@@ -1,5 +1,6 @@
import django
-from django.db.models import Max
+from django.db.models import Max, Sum, Avg, Count
+from django.db.models.functions import TruncDay
from datetime import datetime, timedelta
from gui.lnd_deps import lightning_pb2 as ln
from gui.lnd_deps import lightning_pb2_grpc as lnrpc
@@ -12,7 +13,7 @@
from requests import get
environ['DJANGO_SETTINGS_MODULE'] = 'lndg.settings'
django.setup()
-from gui.models import Payments, PaymentHops, Invoices, Forwards, Channels, Peers, Onchain, Closures, Resolutions, PendingHTLCs, LocalSettings, FailedHTLCs, Autofees, PendingChannels, Rebalancer, PeerEvents
+from gui.models import Payments, PaymentHops, Invoices, Forwards, Channels, Peers, Onchain, Closures, Resolutions, PendingHTLCs, LocalSettings, FailedHTLCs, Autofees, PendingChannels, HistFailedHTLC, PeerEvents
def update_payments(stub):
self_pubkey = stub.GetInfo(ln.GetInfoRequest()).identity_pubkey
@@ -28,13 +29,13 @@ def update_payments(stub):
last_index = Payments.objects.aggregate(Max('index'))['index__max'] if Payments.objects.exists() else 0
payments = stub.ListPayments(ln.ListPaymentsRequest(include_incomplete=True, index_offset=last_index, max_payments=100)).payments
for payment in payments:
- #print (f"{datetime.now().strftime('%c')} : Processing New {payment.payment_index=} {payment.status=} {payment.payment_hash=}")
+ #print(f"{datetime.now().strftime('%c')} : Processing New {payment.payment_index=} {payment.status=} {payment.payment_hash=}")
try:
new_payment = Payments(creation_date=datetime.fromtimestamp(payment.creation_date), payment_hash=payment.payment_hash, value=round(payment.value_msat/1000, 3), fee=round(payment.fee_msat/1000, 3), status=payment.status, index=payment.payment_index)
new_payment.save()
except Exception as e:
#Error inserting, try to update instead
- print (f"{datetime.now().strftime('%c')} : Error processing {new_payment=} : {str(e)=}")
+ print(f"{datetime.now().strftime('%c')} : Error processing {new_payment=} : {str(e)=}")
update_payment(stub, payment, self_pubkey)
def update_payment(stub, payment, self_pubkey):
@@ -44,13 +45,13 @@ def update_payment(stub, payment, self_pubkey):
db_payment.fee = round(payment.fee_msat/1000, 3)
db_payment.status = payment.status
db_payment.index = payment.payment_index
- if payment.status == 2 or payment.status == 1 or payment.status == 3:
+ if payment.status == 2 or payment.status == 1:
PaymentHops.objects.filter(payment_hash=db_payment).delete()
db_payment.chan_out = None
db_payment.rebal_chan = None
db_payment.save()
for attempt in payment.htlcs:
- if attempt.status == 1 or attempt.status == 0 or attempt.status == 2:
+ if attempt.status == 1 or attempt.status == 0:
hops = attempt.route.hops
hop_count = 0
cost_to = 0
@@ -66,7 +67,7 @@ def update_payment(stub, payment, self_pubkey):
# Add additional HTLC information in last hop alias
alias += f'[ {payment.status}-{attempt.status}-{attempt.failure.code}-{attempt.failure.failure_source_index} ]'
#if hop_count == total_hops:
- #print (f"{datetime.now().strftime('%c')} : Debug Hop {attempt.attempt_id=} {attempt.route.total_amt=} {hop.mpp_record.payment_addr.hex()=} {hop.mpp_record.total_amt_msat=} {hop.amp_record=} {db_payment.payment_hash=}")
+ #print(f"{datetime.now().strftime('%c')} : Debug Hop {attempt.attempt_id=} {attempt.route.total_amt=} {hop.mpp_record.payment_addr.hex()=} {hop.mpp_record.total_amt_msat=} {hop.amp_record=} {db_payment.payment_hash=}")
if attempt.status == 1 or attempt.status == 0 or (attempt.status == 2 and attempt.failure.code in (1,2,12)):
PaymentHops(payment_hash=db_payment, attempt_id=attempt.attempt_id, step=hop_count, chan_id=hop.chan_id, alias=alias, chan_capacity=hop.chan_capacity, node_pubkey=hop.pub_key, amt=round(hop.amt_to_forward_msat/1000, 3), fee=round(fee, 3), cost_to=round(cost_to, 3)).save()
cost_to += fee
@@ -85,73 +86,11 @@ def update_payment(stub, payment, self_pubkey):
if hop_count == total_hops and hop.pub_key == self_pubkey and db_payment.rebal_chan is None:
db_payment.rebal_chan = hop.chan_id
db_payment.save()
- try:
- adjust_ar_amt( payment, db_payment.rebal_chan )
- except Exception as e:
- print (f"{datetime.now().strftime('%c')} : Error adjusting AR Amount {payment=} {db_payment.rebal_chan=} : {str(e)=}")
-
-def adjust_ar_amt( payment, chan_id ):
- if payment.status not in (2,3):
- return
- #skip rapid fire rebalances
- last_rebalance_duration = Rebalancer.objects.filter(payment_hash=payment.payment_hash)[0].duration if Rebalancer.objects.filter(payment_hash=payment.payment_hash).exists() else 0
- #print (f"{datetime.now().strftime('%c')} : DEBUG {last_rebalance_duration=} {payment.payment_hash=}")
- if last_rebalance_duration <= 1 or payment.status not in (2,3):
- print (f"{datetime.now().strftime('%c')} : Skipping Liquidity Estimation {last_rebalance_duration=} {payment.payment_hash=}")
- return
- #To be coverted to settings later
- lower_limit = 69420
- upper_limit = 2
-
- if LocalSettings.objects.filter(key='AR-Target%').exists():
- ar_target = float(LocalSettings.objects.filter(key='AR-Target%')[0].value)
- else:
- LocalSettings(key='AR-Target%', value='5').save()
- ar_target = 5
-
- #Adjust AR Target Amount, increase if success reduce if failed.
- db_channel = Channels.objects.filter(chan_id = chan_id)[0] if Channels.objects.filter(chan_id = chan_id).exists() else None
- if payment.status == 2 and chan_id is not None:
- if db_channel is not None and payment.value_msat/1000 > 1000 :
- new_ar_amount = int(min(max(db_channel.ar_amt_target * 1.11, payment.value_msat/1000), db_channel.capacity*ar_target*upper_limit/100))
- if new_ar_amount > db_channel.ar_amt_target:
- print (f"{datetime.now().strftime('%c')} : Increase AR Target Amount {chan_id=} {db_channel.alias=} {db_channel.ar_amt_target=} {new_ar_amount=}")
- db_channel.ar_amt_target = new_ar_amount
- db_channel.save()
-
- if payment.status == 3:
- estimated_liquidity = 0
- attempt = None
- for attempt in payment.htlcs:
- total_hops=len(attempt.route.hops)
- #Failure Codes https://github.com/lightningnetwork/lnd/blob/9f013f5058a7780075bca393acfa97aa0daec6a0/lnrpc/lightning.proto#L4200
- if (attempt.failure.code in (1,2) and attempt.failure.failure_source_index == total_hops) or attempt.failure.code == 12:
- #Failure 1,2 from last hop indicating liquidity available, failure 12 shows fees in sufficient but liquidity available
- estimated_liquidity += attempt.route.total_amt
- chan_id=attempt.route.hops[len(attempt.route.hops)-1].chan_id
- print (f"{datetime.now().strftime('%c')} : Liquidity Estimation {attempt.attempt_id=} {attempt.status=} {attempt.failure.code=} {chan_id=} {attempt.route.total_amt=} {payment.value_msat/1000=} {estimated_liquidity=} {payment.payment_hash=}")
-
- if estimated_liquidity == 0:
- if attempt is not None:
- #Could not estimate liquidity for valid attempts, reduce by half
- estimated_liquidity = db_channel.ar_amt_target/2 if db_channel is not None else 0
- print (f"{datetime.now().strftime('%c')} : Liquidity Estimation not possible, halving {attempt.attempt_id=} {attempt.status=} {attempt.failure.code=} {chan_id=} {attempt.route.total_amt=} {payment.value_msat/1000=} {estimated_liquidity=} {payment.payment_hash=}")
- else:
- #Mostly a case of NO ROUTE
- print (f"{datetime.now().strftime('%c')} : Liquidity Estimation not performed {payment.payment_hash=} {payment.status=} {chan_id=} {estimated_liquidity=} {attempt=}")
-
- if payment.value_msat/1000 >= lower_limit and estimated_liquidity <= payment.value_msat/1000 and estimated_liquidity > 0:
- #Change AR amount. Ignore zero liquidity case which implies breakout from rapid fire AR
- new_ar_amount = int(estimated_liquidity if estimated_liquidity > lower_limit else lower_limit)
- if db_channel is not None and new_ar_amount < db_channel.ar_amt_target:
- print (f"{datetime.now().strftime('%c')} : Decrease AR Target Amount {chan_id=} {db_channel.alias=} {db_channel.ar_amt_target=} {new_ar_amount=}")
- db_channel.ar_amt_target = new_ar_amount
- db_channel.save()
def update_invoices(stub):
open_invoices = Invoices.objects.filter(state=0).order_by('index')
for open_invoice in open_invoices:
- #print (f"{datetime.now().strftime('%c')} : Processing open invoice {open_invoice.index=} {open_invoice.state=} {open_invoice.r_hash=}")
+ #print(f"{datetime.now().strftime('%c')} : Processing open invoice {open_invoice.index=} {open_invoice.state=} {open_invoice.r_hash=}")
invoice_data = stub.ListInvoices(ln.ListInvoiceRequest(index_offset=open_invoice.index-1, num_max_invoices=1)).invoices
if len(invoice_data) > 0 and open_invoice.r_hash == invoice_data[0].r_hash.hex():
update_invoice(stub, invoice_data[0], open_invoice)
@@ -179,7 +118,7 @@ def update_invoice(stub, invoice, db_invoice):
try:
valid = signerstub.VerifyMessage(lns.VerifyMessageReq(msg=(records[34349339]+bytes.fromhex(self_pubkey)+records[34349343]+records[34349334]), signature=records[34349337], pubkey=records[34349339])).valid
except:
- print('Unable to validate signature on invoice: ' + invoice.r_hash.hex())
+ print(f"{datetime.now().strftime('%c')} : Unable to validate signature on invoice: {invoice.r_hash.hex()}")
valid = False
sender = records[34349339].hex() if valid == True else None
try:
@@ -391,7 +330,7 @@ def update_channels(stub):
db_channel.auto_fees = pending_channel.auto_fees
pending_channel.delete()
if old_fee_rate is not None and old_fee_rate != local_policy.fee_rate_milli_msat:
- print (f"{datetime.now().strftime('%c')} : Ext Fee Change Detected {db_channel.chan_id=} {db_channel.alias=} {old_fee_rate=} {db_channel.local_fee_rate=}")
+ print(f"{datetime.now().strftime('%c')} : Ext Fee Change Detected {db_channel.chan_id=} {db_channel.alias=} {old_fee_rate=} {db_channel.local_fee_rate=}")
#External Fee change detected, update auto fee log
Autofees(chan_id=db_channel.chan_id, peer_alias=db_channel.alias, setting=(f"Ext"), old_value=old_fee_rate, new_value=db_channel.local_fee_rate).save()
db_channel.save()
@@ -483,7 +422,7 @@ def update_closures(stub):
try:
db_closure.save()
except Exception as e:
- print('Error inserting closure:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error inserting closure: {str(e)}")
Closures.objects.filter(funding_txid=txid,funding_index=index).delete()
return
if resolution_count > 0:
@@ -503,38 +442,36 @@ def reconnect_peers(stub):
if peers.filter(pubkey=inactive_peer).exists():
peer = peers.filter(pubkey=inactive_peer)[0]
if peer.last_reconnected == None or (int((datetime.now() - peer.last_reconnected).total_seconds() / 60) > 2):
- print (f"{datetime.now().strftime('%c')} : Reconnecting {peer.alias=} {peer.pubkey=} {peer.last_reconnected=}")
+ print(f"{datetime.now().strftime('%c')} : Reconnecting peer {peer.alias} {peer.pubkey=} {peer.last_reconnected=}")
if peer.connected == True:
- print (f"{datetime.now().strftime('%c')} : ... Inactive channel is still connected to peer, disconnecting peer. {peer.alias=} {inactive_peer=}")
+ print(f"{datetime.now().strftime('%c')} : ... Inactive channel is still connected to peer, disconnecting peer. {peer.alias=} {inactive_peer=}")
try:
response = stub.DisconnectPeer(ln.DisconnectPeerRequest(pub_key=inactive_peer))
- print (f"{datetime.now().strftime('%c')} : .... Status Disconnect {peer.alias=} {inactive_peer=} {response=}")
+ print(f"{datetime.now().strftime('%c')} : .... Disconnected peer {peer.alias} {inactive_peer=} {response=}")
peer.connected = False
peer.save()
except Exception as e:
- print (f"{datetime.now().strftime('%c')} : .... Error disconnecting {peer.alias} {inactive_peer=} {str(e)=}")
+ print(f"{datetime.now().strftime('%c')} : .... Error disconnecting peer {peer.alias} {inactive_peer=} {str(e)=}")
try:
node = stub.GetNodeInfo(ln.NodeInfoRequest(pub_key=inactive_peer, include_channels=False)).node
host = node.addresses[0].addr
except Exception as e:
- print (f"{datetime.now().strftime('%c')} : ... Unable to find node info on graph, using last known value {peer.alias=} {peer.pubkey=} {peer.address=} {str(e)=}")
+ print(f"{datetime.now().strftime('%c')} : ... Unable to find node info on graph, using last known value for {peer.alias} {peer.pubkey=} {peer.address=} {str(e)=}")
host = peer.address
#address = ln.LightningAddress(pubkey=inactive_peer, host=host)
- print (f"{datetime.now().strftime('%c')} : ... Attempting connection to {peer.alias=} {inactive_peer=} {host=}")
+ print(f"{datetime.now().strftime('%c')} : ... Attempting connection to {peer.alias} {inactive_peer=} {host=}")
try:
#try both the graph value and last know value
stub.ConnectPeer(request = ln.ConnectPeerRequest(addr=ln.LightningAddress(pubkey=inactive_peer, host=host), perm=True, timeout=5))
if host != peer.address and peer.address[:9] != '127.0.0.1':
stub.ConnectPeer(request = ln.ConnectPeerRequest(addr=ln.LightningAddress(pubkey=inactive_peer, host=peer.address), perm=True, timeout=5))
- #response = stub.ConnectPeer(request = ln.ConnectPeerRequest(addr=address, perm=False, timeout=5))
- #print (f"{datetime.now().strftime('%c')} : .... Status {peer.alias=} {inactive_peer=} {response=}")
except Exception as e:
error = str(e)
details_index = error.find('details =') + 11
debug_error_index = error.find('debug_error_string =') - 3
error_msg = error[details_index:debug_error_index]
- print (f"{datetime.now().strftime('%c')} : .... Error reconnecting {peer.alias} {inactive_peer=} {error_msg=}")
+ print(f"{datetime.now().strftime('%c')} : .... Error reconnecting {peer.alias} {inactive_peer=} {error_msg=}")
peer.last_reconnected = datetime.now()
peer.save()
@@ -563,11 +500,10 @@ def clean_payments(stub):
details_index = error.find('details =') + 11
debug_error_index = error.find('debug_error_string =') - 3
error_msg = error[details_index:debug_error_index]
- print (f"{datetime.now().strftime('%c')} : Error {payment.index=} {payment.status=} {payment.payment_hash=} {error_msg=}")
+ print(f"{datetime.now().strftime('%c')} : Error {payment.index=} {payment.status=} {payment.payment_hash=} {error_msg=}")
finally:
payment.cleaned = True
payment.save()
- #print (f"{datetime.now().strftime('%c')} : Cleaned {payment.index=} {payment.status=} {payment.cleaned=} {payment.payment_hash=}")
def auto_fees(stub):
if LocalSettings.objects.filter(key='AF-Enabled').exists():
@@ -665,7 +601,7 @@ def auto_fees(stub):
update_df = channels_df[channels_df['adjustment']!=0]
if not update_df.empty:
for target_channel in update_df.to_dict(orient='records'):
- print('Updating fees for channel ' + str(target_channel['chan_id']) + ' to a value of: ' + str(target_channel['new_rate']))
+ print(f"{datetime.now().strftime('%c')} : Updating fees for channel {str(target_channel['chan_id'])} to a value of: {str(target_channel['new_rate'])}")
channel = Channels.objects.filter(chan_id=target_channel['chan_id'])[0]
channel_point = ln.ChannelPoint()
channel_point.funding_txid_bytes = bytes.fromhex(channel.funding_txid)
@@ -677,8 +613,44 @@ def auto_fees(stub):
channel.save()
Autofees(chan_id=channel.chan_id, peer_alias=channel.alias, setting=(f"AF [ {target_channel['net_routed_7day']}:{target_channel['in_percent']}:{target_channel['out_percent']} ]"), old_value=target_channel['local_fee_rate'], new_value=target_channel['new_rate']).save()
+def agg_htlcs(target_htlcs, category):
+ try:
+ target_ids = target_htlcs.values_list('id')
+ agg_htlcs = FailedHTLCs.objects.filter(id__in=target_ids).annotate(day=TruncDay('timestamp')).values('day', 'chan_id_in', 'chan_id_out').annotate(amount=Sum('amount'), fee=Sum('missed_fee'), liq=Avg('chan_out_liq'), pending=Avg('chan_out_pending'), count=Count('id'), chan_in_alias=Max('chan_in_alias'), chan_out_alias=Max('chan_out_alias'))
+ for htlc in agg_htlcs:
+ if HistFailedHTLC.objects.filter(date=htlc['day'],chan_id_in=htlc['chan_id_in'],chan_id_out=htlc['chan_id_out']).exists():
+ htlc_itm = HistFailedHTLC.objects.filter(date=htlc['day'],chan_id_in=htlc['chan_id_in'],chan_id_out=htlc['chan_id_out']).get()
+ else:
+ htlc_itm = HistFailedHTLC(htlc_count=0, amount_sum=0, fee_sum=0, liq_avg=0, pending_avg=0, balance_count=0, downstream_count=0, other_count=0)
+ htlc_itm.date = htlc['day']
+ htlc_itm.chan_id_in = htlc['chan_id_in']
+ htlc_itm.chan_id_out = htlc['chan_id_out']
+ htlc_itm.chan_in_alias = htlc['chan_in_alias']
+ htlc_itm.chan_out_alias = htlc['chan_out_alias']
+ htlc_itm.htlc_count += htlc['count']
+ htlc_itm.amount_sum += htlc['amount']
+ htlc_itm.fee_sum += htlc['fee']
+ htlc_itm.liq_avg += (htlc['count']/htlc_itm.htlc_count)*((0 if htlc['liq'] is None else htlc['liq'])-htlc_itm.liq_avg)
+ htlc_itm.pending_avg += (htlc['count']/htlc_itm.htlc_count)*((0 if htlc['pending'] is None else htlc['pending'])-htlc_itm.pending_avg)
+ if category == 'balance':
+ htlc_itm.balance_count += htlc['count']
+ elif category == 'downstream':
+ htlc_itm.downstream_count += htlc['count']
+ elif category == 'other':
+ htlc_itm.other_count += htlc['count']
+ htlc_itm.save()
+ FailedHTLCs.objects.filter(id__in=target_ids, chan_id_in=htlc['chan_id_in'], chan_id_out=htlc['chan_id_out']).annotate(day=TruncDay('timestamp')).filter(day=htlc['day']).delete()
+ except Exception as e:
+ print(f"{datetime.now().strftime('%c')} : Error processing background data: {str(e)}")
+
+def agg_failed_htlcs():
+ time_filter = datetime.now() - timedelta(days=30)
+ agg_htlcs(FailedHTLCs.objects.filter(timestamp__lte=time_filter, failure_detail=6)[:100], 'balance')
+ agg_htlcs(FailedHTLCs.objects.filter(timestamp__lte=time_filter, failure_detail=99)[:100], 'downstream')
+ agg_htlcs(FailedHTLCs.objects.filter(timestamp__lte=time_filter).exclude(failure_detail__in=[6, 99])[:100], 'other')
+
+
def main():
- #print (f"{datetime.now().strftime('%c')} : Entering Jobs")
try:
stub = lnrpc.LightningStub(lnd_connect())
#Update data
@@ -692,8 +664,8 @@ def main():
reconnect_peers(stub)
clean_payments(stub)
auto_fees(stub)
+ agg_failed_htlcs()
except Exception as e:
- print (f"{datetime.now().strftime('%c')} : Error processing background data: {str(e)=}")
- #print (f"{datetime.now().strftime('%c')} : Exit Jobs")
+ print(f"{datetime.now().strftime('%c')} : Error processing background data: {str(e)}")
if __name__ == '__main__':
main()
diff --git a/rebalancer.py b/rebalancer.py
index bc3466b9..5b99f34a 100644
--- a/rebalancer.py
+++ b/rebalancer.py
@@ -8,6 +8,8 @@
from gui.lnd_deps import router_pb2_grpc as lnrouter
from gui.lnd_deps.lnd_connect import lnd_connect, async_lnd_connect
from os import environ
+from typing import List
+
environ['DJANGO_SETTINGS_MODULE'] = 'lndg.settings'
django.setup()
from gui.models import Rebalancer, Channels, LocalSettings, Forwards, Autopilot
@@ -17,28 +19,33 @@ def get_out_cans(rebalance, auto_rebalance_channels):
try:
return list(auto_rebalance_channels.filter(auto_rebalance=False, percent_outbound__gte=F('ar_out_target')).exclude(remote_pubkey=rebalance.last_hop_pubkey).values_list('chan_id', flat=True))
except Exception as e:
- print(datetime.now(), 'Error getting outbound cands:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error getting outbound cands: {str(e)}")
@sync_to_async
def save_record(record):
try:
record.save()
except Exception as e:
- print(datetime.now(), 'Error saving database record:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error saving database record: {str(e)}")
@sync_to_async
def inbound_cans_len(inbound_cans):
try:
return len(inbound_cans)
except Exception as e:
- print(datetime.now(), 'Error getting inbound cands:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error getting inbound cands: {str(e)}")
async def run_rebalancer(rebalance, worker):
try:
- auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).annotate(inbound_can=(((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity'))/Sum('ar_in_target'))
+ #Reduce potential rebalance value in percent out to avoid going below AR-OUT-Target
+ auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound')-rebalance.value)*100)/Sum('capacity')).annotate(inbound_can=(((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity'))/Sum('ar_in_target'))
outbound_cans = await get_out_cans(rebalance, auto_rebalance_channels)
if len(outbound_cans) == 0 and rebalance.manual == False:
- print(datetime.now(), 'No outbound_cans')
+ print(f"{datetime.now().strftime('%c')} : No outbound_cans")
+ rebalance.status = 406
+ rebalance.start = datetime.now()
+ rebalance.stop = datetime.now()
+ await save_record(rebalance)
return None
elif str(outbound_cans).replace('\'', '') != rebalance.outgoing_chan_ids and rebalance.manual == False:
rebalance.outgoing_chan_ids = str(outbound_cans).replace('\'', '')
@@ -50,9 +57,9 @@ async def run_rebalancer(rebalance, worker):
chan_ids = json.loads(rebalance.outgoing_chan_ids)
timeout = rebalance.duration * 60
invoice_response = stub.AddInvoice(ln.Invoice(value=rebalance.value, expiry=timeout))
- print(datetime.now(), worker, 'starting rebalance for:', rebalance.target_alias, ':', rebalance.last_hop_pubkey, 'Amount:', rebalance.value, 'Duration:', rebalance.duration, 'via:', chan_ids )
+ print(f"{datetime.now().strftime('%c')} : {worker} starting rebalance for: {rebalance.target_alias} {rebalance.last_hop_pubkey=} {rebalance.value=} {rebalance.duration=} {chan_ids=}")
async for payment_response in routerstub.SendPaymentV2(lnr.SendPaymentRequest(payment_request=str(invoice_response.payment_request), fee_limit_msat=int(rebalance.fee_limit*1000), outgoing_chan_ids=chan_ids, last_hop_pubkey=bytes.fromhex(rebalance.last_hop_pubkey), timeout_seconds=(timeout-5), allow_self_payment=True), timeout=(timeout+60)):
- print (datetime.now(), worker, 'got a payment response:', payment_response.status, 'with reason:', payment_response.failure_reason, 'for payment hash:', payment_response.payment_hash)
+ #print(f"{datetime.now().strftime('%c')} : DEBUG {worker} got a payment response: {payment_response.status=} {payment_response.failure_reason=} {payment_response.payment_hash=}")
if payment_response.status == 1 and rebalance.status == 0:
#IN-FLIGHT
rebalance.payment_hash = payment_response.payment_hash
@@ -87,39 +94,71 @@ async def run_rebalancer(rebalance, worker):
rebalance.status = 408
else:
rebalance.status = 400
- print(datetime.now(), 'Error while sending payment:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error while sending payment: {str(e)}")
finally:
rebalance.stop = datetime.now()
await save_record(rebalance)
- print(datetime.now(), worker, 'completed payment attempts for:', rebalance.payment_hash)
+ print(f"{datetime.now().strftime('%c')} : {worker} completed payment attempts for: {rebalance.payment_hash=}")
original_alias = rebalance.target_alias
inc=1.21
dec=2
if rebalance.status ==2:
await update_channels(stub, rebalance.last_hop_pubkey, successful_out)
- auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).annotate(inbound_can=(((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity'))/Sum('ar_in_target'))
+ #Reduce potential rebalance value in percent out to avoid going below AR-OUT-Target
+ auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound')-rebalance.value*inc)*100)/Sum('capacity')).annotate(inbound_can=(((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity'))/Sum('ar_in_target'))
inbound_cans = auto_rebalance_channels.filter(remote_pubkey=rebalance.last_hop_pubkey).filter(auto_rebalance=True, inbound_can__gte=1)
outbound_cans = await get_out_cans(rebalance, auto_rebalance_channels)
if await inbound_cans_len(inbound_cans) > 0 and len(outbound_cans) > 0:
next_rebalance = Rebalancer(value=int(rebalance.value*inc), fee_limit=round(rebalance.fee_limit*inc, 3), outgoing_chan_ids=str(outbound_cans).replace('\'', ''), last_hop_pubkey=rebalance.last_hop_pubkey, target_alias=original_alias, duration=1)
await save_record(next_rebalance)
- print (f"{datetime.now()} RapidFire up {next_rebalance.target_alias=} {next_rebalance.value=} {rebalance.value=}")
+ print(f"{datetime.now().strftime('%c')} : RapidFire up {next_rebalance.target_alias=} {next_rebalance.value=} {rebalance.value=}")
else:
next_rebalance = None
- elif rebalance.status > 2 and rebalance.duration <= 1 and rebalance.value > 69420:
+ # For failed rebalances, try in rapid fire with reduced balances until give up.
+ elif rebalance.status > 2 and rebalance.value > 69420:
#Previous Rapidfire with increased value failed, try with lower value up to 69420.
+ if rebalance.duration > 1:
+                next_value = await estimate_liquidity(payment_response)
+ if next_value < 1000:
+ next_rebalance = None
+ return next_rebalance
+ else:
+ next_value = rebalance.value/dec
+
inbound_cans = auto_rebalance_channels.filter(remote_pubkey=rebalance.last_hop_pubkey).filter(auto_rebalance=True, inbound_can__gte=1)
if await inbound_cans_len(inbound_cans) > 0 and len(outbound_cans) > 0:
- next_rebalance = Rebalancer(value=int(rebalance.value/dec), fee_limit=round(rebalance.fee_limit/dec, 3), outgoing_chan_ids=str(outbound_cans).replace('\'', ''), last_hop_pubkey=rebalance.last_hop_pubkey, target_alias=original_alias, duration=1)
+ next_rebalance = Rebalancer(value=int(next_value), fee_limit=round(rebalance.fee_limit/(rebalance.value/next_value), 3), outgoing_chan_ids=str(outbound_cans).replace('\'', ''), last_hop_pubkey=rebalance.last_hop_pubkey, target_alias=original_alias, duration=1)
await save_record(next_rebalance)
- print (f"{datetime.now()} RapidFire Down {next_rebalance.target_alias=} {next_rebalance.value=} {rebalance.value=}")
+ print(f"{datetime.now().strftime('%c')} : RapidFire Down {next_rebalance.target_alias=} {next_rebalance.value=} {rebalance.value=}")
else:
next_rebalance = None
else:
next_rebalance = None
return next_rebalance
except Exception as e:
- print(datetime.now(), 'Error running rebalance attempt:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error running rebalance attempt: {str(e)}")
+
+@sync_to_async
+def estimate_liquidity(payment):
+ try:
+ estimated_liquidity = 0
+ if payment.status == 3:
+ attempt = None
+ for attempt in payment.htlcs:
+ total_hops=len(attempt.route.hops)
+ #Failure Codes https://github.com/lightningnetwork/lnd/blob/9f013f5058a7780075bca393acfa97aa0daec6a0/lnrpc/lightning.proto#L4200
+ #print(f"{datetime.now().strftime('%c')} : DEBUG Liquidity Estimation {attempt.attempt_id=} {attempt.status=} {attempt.failure.code=} {attempt.failure.failure_source_index=} {total_hops=} {attempt.route.total_amt=} {payment.value_msat/1000=} {estimated_liquidity=} {payment.payment_hash=}")
+ if attempt.failure.failure_source_index == total_hops:
+ #Failure from last hop indicating liquidity available
+ estimated_liquidity = attempt.route.total_amt if attempt.route.total_amt > estimated_liquidity else estimated_liquidity
+ chan_id=attempt.route.hops[len(attempt.route.hops)-1].chan_id
+ #print(f"{datetime.now().strftime('%c')} : DEBUG Liquidity Estimation {attempt.attempt_id=} {attempt.status=} {attempt.failure.code=} {chan_id=} {attempt.route.total_amt=} {payment.value_msat/1000=} {estimated_liquidity=} {payment.payment_hash=}")
+ print(f"{datetime.now().strftime('%c')} : Estimated Liquidity {estimated_liquidity=} {payment.payment_hash=} {payment.status=} {payment.failure_reason=}")
+ except Exception as e:
+ print(f"{datetime.now().strftime('%c')} : Error estimating liquidity: {str(e)}")
+ estimated_liquidity = 0
+
+ return estimated_liquidity
@sync_to_async
def update_channels(stub, incoming_channel, outgoing_channel):
@@ -137,10 +176,10 @@ def update_channels(stub, incoming_channel, outgoing_channel):
db_channel.remote_balance = channel.remote_balance
db_channel.save()
except Exception as e:
- print(datetime.now(), 'Error updating channel balances:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error updating channel balances: {str(e)}")
@sync_to_async
-def auto_schedule():
+def auto_schedule() -> List[Rebalancer]:
try:
#No rebalancer jobs have been scheduled, lets look for any channels with an auto_rebalance flag and make the best request if we find one
if LocalSettings.objects.filter(key='AR-Enabled').exists():
@@ -148,64 +187,72 @@ def auto_schedule():
else:
LocalSettings(key='AR-Enabled', value='0').save()
enabled = 0
- scheduled_ids = []
- if enabled == 1:
- auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).annotate(inbound_can=(((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity'))/Sum('ar_in_target'))
- if len(auto_rebalance_channels) > 0:
- if not LocalSettings.objects.filter(key='AR-Outbound%').exists():
- LocalSettings(key='AR-Outbound%', value='75').save()
- if not LocalSettings.objects.filter(key='AR-Inbound%').exists():
- LocalSettings(key='AR-Inbound%', value='100').save()
- outbound_cans = list(auto_rebalance_channels.filter(auto_rebalance=False, percent_outbound__gte=F('ar_out_target')).values_list('chan_id', flat=True))
- already_scheduled = Rebalancer.objects.exclude(last_hop_pubkey='').filter(status=0).values_list('last_hop_pubkey')
- inbound_cans = auto_rebalance_channels.filter(auto_rebalance=True, inbound_can__gte=1).exclude(remote_pubkey__in=already_scheduled).order_by('-remote_balance')
- if len(inbound_cans) > 0 and len(outbound_cans) > 0:
- if LocalSettings.objects.filter(key='AR-MaxFeeRate').exists():
- max_fee_rate = int(LocalSettings.objects.filter(key='AR-MaxFeeRate')[0].value)
- else:
- LocalSettings(key='AR-MaxFeeRate', value='100').save()
- max_fee_rate = 100
- if LocalSettings.objects.filter(key='AR-Variance').exists():
- variance = int(LocalSettings.objects.filter(key='AR-Variance')[0].value)
- else:
- LocalSettings(key='AR-Variance', value='0').save()
- variance = 0
- if LocalSettings.objects.filter(key='AR-WaitPeriod').exists():
- wait_period = int(LocalSettings.objects.filter(key='AR-WaitPeriod')[0].value)
- else:
- LocalSettings(key='AR-WaitPeriod', value='30').save()
- wait_period = 30
- if not LocalSettings.objects.filter(key='AR-Target%').exists():
- LocalSettings(key='AR-Target%', value='5').save()
- if not LocalSettings.objects.filter(key='AR-MaxCost%').exists():
- LocalSettings(key='AR-MaxCost%', value='65').save()
- for target in inbound_cans:
- target_fee_rate = int(target.local_fee_rate * (target.ar_max_cost/100))
- if target_fee_rate > 0 and target_fee_rate > target.remote_fee_rate:
- target_value = int(target.ar_amt_target+(target.ar_amt_target*((secrets.choice(range(-1000,1001))/1000)*variance/100)))
- target_fee = round(target_fee_rate*target_value*0.000001, 3) if target_fee_rate <= max_fee_rate else round(max_fee_rate*target_value*0.000001, 3)
- if target_fee > 0:
- if LocalSettings.objects.filter(key='AR-Time').exists():
- target_time = int(LocalSettings.objects.filter(key='AR-Time')[0].value)
- else:
- LocalSettings(key='AR-Time', value='5').save()
- target_time = 5
- # TLDR: willing to pay 1 sat for every value_per_fee sats moved
- if Rebalancer.objects.filter(last_hop_pubkey=target.remote_pubkey).exclude(status=0).exists():
- last_rebalance = Rebalancer.objects.filter(last_hop_pubkey=target.remote_pubkey).exclude(status=0).order_by('-id')[0]
- if not (last_rebalance.status == 2 or (last_rebalance.status in [3, 4, 5, 6, 7, 400, 408] and (int((datetime.now() - last_rebalance.stop).total_seconds() / 60) > wait_period)) or (last_rebalance.status == 1 and (int((datetime.now() - last_rebalance.start).total_seconds() / 60) > wait_period))):
- continue
- print(datetime.now(), 'Creating Auto Rebalance Request for:', target.chan_id)
- print(datetime.now(), 'Request routing through:', outbound_cans)
- print(datetime.now(), 'Target Value:', target_value, '/', target.ar_amt_target)
- print(datetime.now(), 'Target Fee:', target_fee)
- print(datetime.now(), 'Target Time:', target_time)
- new_rebalance = Rebalancer(value=target_value, fee_limit=target_fee, outgoing_chan_ids=str(outbound_cans).replace('\'', ''), last_hop_pubkey=target.remote_pubkey, target_alias=target.alias, duration=target_time)
- new_rebalance.save()
- scheduled_ids.append(new_rebalance.id)
- return scheduled_ids
+ if enabled == 0:
+ return []
+
+ auto_rebalance_channels = Channels.objects.filter(is_active=True, is_open=True, private=False).annotate(percent_outbound=((Sum('local_balance')+Sum('pending_outbound'))*100)/Sum('capacity')).annotate(inbound_can=(((Sum('remote_balance')+Sum('pending_inbound'))*100)/Sum('capacity'))/Sum('ar_in_target'))
+ if len(auto_rebalance_channels) == 0:
+ return []
+
+ if not LocalSettings.objects.filter(key='AR-Outbound%').exists():
+ LocalSettings(key='AR-Outbound%', value='75').save()
+ if not LocalSettings.objects.filter(key='AR-Inbound%').exists():
+ LocalSettings(key='AR-Inbound%', value='100').save()
+ outbound_cans = list(auto_rebalance_channels.filter(auto_rebalance=False, percent_outbound__gte=F('ar_out_target')).values_list('chan_id', flat=True))
+ already_scheduled = Rebalancer.objects.exclude(last_hop_pubkey='').filter(status=0).values_list('last_hop_pubkey')
+ inbound_cans = auto_rebalance_channels.filter(auto_rebalance=True, inbound_can__gte=1).exclude(remote_pubkey__in=already_scheduled).order_by('-inbound_can')
+ if len(inbound_cans) == 0 or len(outbound_cans) == 0:
+ return []
+
+ if LocalSettings.objects.filter(key='AR-MaxFeeRate').exists():
+ max_fee_rate = int(LocalSettings.objects.filter(key='AR-MaxFeeRate')[0].value)
+ else:
+ LocalSettings(key='AR-MaxFeeRate', value='100').save()
+ max_fee_rate = 100
+ if LocalSettings.objects.filter(key='AR-Variance').exists():
+ variance = int(LocalSettings.objects.filter(key='AR-Variance')[0].value)
+ else:
+ LocalSettings(key='AR-Variance', value='0').save()
+ variance = 0
+ if LocalSettings.objects.filter(key='AR-WaitPeriod').exists():
+ wait_period = int(LocalSettings.objects.filter(key='AR-WaitPeriod')[0].value)
+ else:
+ LocalSettings(key='AR-WaitPeriod', value='30').save()
+ wait_period = 30
+ if not LocalSettings.objects.filter(key='AR-Target%').exists():
+ LocalSettings(key='AR-Target%', value='5').save()
+ if not LocalSettings.objects.filter(key='AR-MaxCost%').exists():
+ LocalSettings(key='AR-MaxCost%', value='65').save()
+ to_schedule = []
+ for target in inbound_cans:
+ target_fee_rate = int(target.local_fee_rate * (target.ar_max_cost/100))
+ if target_fee_rate > 0 and target_fee_rate > target.remote_fee_rate:
+ target_value = int(target.ar_amt_target+(target.ar_amt_target*((secrets.choice(range(-1000,1001))/1000)*variance/100)))
+ target_fee = round(target_fee_rate*target_value*0.000001, 3) if target_fee_rate <= max_fee_rate else round(max_fee_rate*target_value*0.000001, 3)
+            if target_fee == 0:
+                continue
+
+ if LocalSettings.objects.filter(key='AR-Time').exists():
+ target_time = int(LocalSettings.objects.filter(key='AR-Time')[0].value)
+ else:
+ LocalSettings(key='AR-Time', value='5').save()
+ target_time = 5
+ # TLDR: willing to pay 1 sat for every value_per_fee sats moved
+ if Rebalancer.objects.filter(last_hop_pubkey=target.remote_pubkey).exclude(status=0).exists():
+ last_rebalance = Rebalancer.objects.filter(last_hop_pubkey=target.remote_pubkey).exclude(status=0).order_by('-id')[0]
+ if not (last_rebalance.status == 2 or (last_rebalance.status in [3, 4, 5, 6, 7, 400, 408, 499] and (int((datetime.now() - last_rebalance.stop).total_seconds() / 60) > wait_period)) or (last_rebalance.status == 1 and (int((datetime.now() - last_rebalance.start).total_seconds() / 60) > wait_period))):
+ continue
+ print(f"{datetime.now().strftime('%c')} : Creating Auto Rebalance Request for: {target.chan_id}")
+ print(f"{datetime.now().strftime('%c')} : Request routing through: {outbound_cans}")
+            print(f"{datetime.now().strftime('%c')} : Target Value: {target_value} / {target.ar_amt_target}")
+            print(f"{datetime.now().strftime('%c')} : Target Fee: {target_fee}")
+            print(f"{datetime.now().strftime('%c')} : Target Time: {target_time}")
+ new_rebalance = Rebalancer(value=target_value, fee_limit=target_fee, outgoing_chan_ids=str(outbound_cans).replace('\'', ''), last_hop_pubkey=target.remote_pubkey, target_alias=target.alias, duration=target_time)
+ new_rebalance.save()
+ to_schedule.append(new_rebalance)
+ return to_schedule
except Exception as e:
- print(datetime.now(), 'Error scheduling rebalances:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error scheduling rebalances: {str(e)}")
@sync_to_async
def auto_enable():
@@ -237,7 +284,7 @@ def auto_enable():
#print('Processing: ', peer_channel.alias, ' : ', peer_channel.chan_id, ' : ', oapD, " : ", iapD, ' : ', outbound_percent, ' : ', inbound_percent)
if peer_channel.ar_out_target == 100 and peer_channel.auto_rebalance == True:
#Special Case for LOOP, Wos, etc. Always Auto Rebalance if enabled to keep outbound full.
- print (f"{datetime.now()} Skipping AR enabled and 100% oTarget channel... {peer_channel.alias=} {peer_channel.chan_id=}")
+ print(f"{datetime.now().strftime('%c')} : Skipping AR enabled and 100% oTarget channel... {peer_channel.alias=} {peer_channel.chan_id=}")
pass
elif oapD > (iapD*1.10) and outbound_percent > 75:
#print('Case 1: Pass')
@@ -247,13 +294,13 @@ def auto_enable():
peer_channel.auto_rebalance = True
peer_channel.save()
Autopilot(chan_id=peer_channel.chan_id, peer_alias=peer_channel.alias, setting='Enabled', old_value=0, new_value=1).save()
- print(datetime.now(), 'Auto Pilot Enabled: ', peer_channel.alias, ' : ', peer_channel.chan_id , ' Out: ', oapD, ' In: ', iapD)
+ print(f"{datetime.now().strftime('%c')} : Auto Pilot Enabled: {peer_channel.alias=} {peer_channel.chan_id=} {oapD=} {iapD=}")
elif oapD < (iapD*1.10) and outbound_percent > 75 and peer_channel.auto_rebalance == True:
#print('Case 3: Disable AR - o7D < i7D AND Outbound Liq > 75%')
peer_channel.auto_rebalance = False
peer_channel.save()
Autopilot(chan_id=peer_channel.chan_id, peer_alias=peer_channel.alias, setting='Enabled', old_value=1, new_value=0).save()
- print(datetime.now(), 'Auto Pilot Disabled (3): ', peer_channel.alias, ' : ', peer_channel.chan_id, ' Out: ', oapD, ' In: ', iapD )
+                print(f"{datetime.now().strftime('%c')} : Auto Pilot Disabled (3): {peer_channel.alias=} {peer_channel.chan_id=} {oapD=} {iapD=}")
elif oapD < (iapD*1.10) and inbound_percent > 75:
#print('Case 4: Pass')
pass
@@ -261,14 +308,7 @@ def auto_enable():
#print('Case 5: Pass')
pass
except Exception as e:
- print(datetime.now(), 'Error during auto channel enabling:', str(e))
-
-@sync_to_async
-def get_scheduled_rebal(id):
- try:
- return Rebalancer.objects.get(id=id)
- except Exception as e:
- print(datetime.now(), 'Error getting scheduled rebalances:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error during auto channel enabling: {str(e)}")
@sync_to_async
def get_pending_rebals():
@@ -276,46 +316,45 @@ def get_pending_rebals():
rebalances = Rebalancer.objects.filter(status=0).order_by('id')
return rebalances, len(rebalances)
except Exception as e:
- print(datetime.now(), 'Error getting pending rebalances:', str(e))
+ print(f"{datetime.now().strftime('%c')} : Error getting pending rebalances: {str(e)}")
shutdown_rebalancer = False
active_rebalances = []
async def async_queue_manager(rebalancer_queue):
- print(datetime.now(), 'Queue manager is starting...')
+ print(f"{datetime.now().strftime('%c')} : Queue manager is starting...")
pending_rebalances, rebal_count = await get_pending_rebals()
if rebal_count > 0:
for rebalance in pending_rebalances:
await rebalancer_queue.put(rebalance)
- while True:
- try:
+ try:
+ while True:
global active_rebalances
- print(datetime.now(), 'Queue currently has', rebalancer_queue.qsize(), 'items...')
- print(datetime.now(), 'There are currently', len(active_rebalances), 'tasks in progress...')
- print(datetime.now(), 'Queue manager is checking for more work...')
+ print(f"{datetime.now().strftime('%c')} : Queue currently has {rebalancer_queue.qsize()} items...")
+ print(f"{datetime.now().strftime('%c')} : There are currently {len(active_rebalances)} tasks in progress...")
+ print(f"{datetime.now().strftime('%c')} : Queue manager is checking for more work...")
await auto_enable()
- scheduled_ids = await auto_schedule()
- if len(scheduled_ids) > 0:
- print(datetime.now(), 'Scheduling', len(scheduled_ids), 'more jobs...')
- for id in scheduled_ids:
- scheduled_rebal = await get_scheduled_rebal(id)
- await rebalancer_queue.put(scheduled_rebal)
+ scheduled = await auto_schedule()
+ if len(scheduled) > 0:
+ print(f"{datetime.now().strftime('%c')} : Scheduling {len(scheduled)} more jobs...")
+ for rebalance in scheduled:
+ await rebalancer_queue.put(rebalance)
elif rebalancer_queue.qsize() == 0 and len(active_rebalances) == 0:
- print(datetime.now(), 'Queue is still empty, stoping the rebalancer...')
+                print(f"{datetime.now().strftime('%c')} : Queue is still empty, stopping the rebalancer...")
global shutdown_rebalancer
shutdown_rebalancer = True
return
await asyncio.sleep(30)
- except Exception as e:
- print(datetime.now(), 'Queue manager exception:', str(e))
- finally:
- print(datetime.now(), 'Queue manager has shut down...')
+ except Exception as e:
+ print(f"{datetime.now().strftime('%c')} : Queue manager exception: {str(e)}")
+ finally:
+ print(f"{datetime.now().strftime('%c')} : Queue manager has shut down...")
async def async_run_rebalancer(worker, rebalancer_queue):
while True:
global active_rebalances, shutdown_rebalancer
if not rebalancer_queue.empty():
rebalance = await rebalancer_queue.get()
- print(datetime.now(), worker + ' is starting a new request...')
+ print(f"{datetime.now().strftime('%c')} : {worker} is starting a new request...")
active_rebalance_id = None
if rebalance != None:
active_rebalance_id = rebalance.id
@@ -324,7 +363,7 @@ async def async_run_rebalancer(worker, rebalancer_queue):
rebalance = await run_rebalancer(rebalance, worker)
if active_rebalance_id != None:
active_rebalances.remove(active_rebalance_id)
- print(datetime.now(), worker + ' completed its request...')
+ print(f"{datetime.now().strftime('%c')} : {worker} completed its request...")
else:
if shutdown_rebalancer == True:
return
@@ -335,7 +374,7 @@ async def start_queue(worker_count=1):
manager = asyncio.create_task(async_queue_manager(rebalancer_queue))
workers = [asyncio.create_task(async_run_rebalancer("Worker " + str(worker_num+1), rebalancer_queue)) for worker_num in range(worker_count)]
await asyncio.gather(manager, *workers)
- print(datetime.now(), 'Manager and workers have stopped...')
+ print(f"{datetime.now().strftime('%c')} : Manager and workers have stopped...")
def main():
if Rebalancer.objects.filter(status=1).exists():
@@ -350,7 +389,7 @@ def main():
LocalSettings(key='AR-Workers', value='1').save()
worker_count = 1
asyncio.run(start_queue(worker_count))
- print(datetime.now(), 'Rebalancer successfully shutdown...')
+ print(f"{datetime.now().strftime('%c')} : Rebalancer successfully shutdown...")
if __name__ == '__main__':
main()
diff --git a/requirements.txt b/requirements.txt
index da904434..5f1505c7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,6 @@
Django
djangorestframework
+django-filter
django-qr-code
grpcio
protobuf