Skip to content

Commit

Permalink
Remove adding headers in large bulks.
Browse files Browse the repository at this point in the history
  • Loading branch information
bayk committed Dec 7, 2024
1 parent c35105d commit 5999748
Showing 1 changed file with 4 additions and 30 deletions.
34 changes: 4 additions & 30 deletions chain/src/txhashset/headers_desegmenter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -379,8 +379,6 @@ impl<T> HeadersRecieveCache<T> {
.insert(first_header.height, (bhs, peer_info));

// Apply data from cache if possible
let mut headers_all: Vec<BlockHeader> = Vec::new();
let mut headers_by_peer: Vec<(Vec<BlockHeader>, T)> = Vec::new();
let tip = self
.chain
.header_head()
Expand All @@ -401,37 +399,13 @@ impl<T> HeadersRecieveCache<T> {
if *height > tip_height + 1 {
break;
}
let (_, (mut bhs, peer)) = self.main_headers_cache.pop_first().unwrap();
let (_, (bhs, peer)) = self.main_headers_cache.pop_first().unwrap();
tip_height = bhs.last().expect("bhs can't be empty").height;

headers_by_peer.push((bhs.clone(), peer));
headers_all.append(&mut bhs);
}

if !headers_all.is_empty() {
match self
.chain
.sync_block_headers(&headers_all, tip, Options::NONE)
{
// Adding headers into the blockchain. Adding by 512 at a time is optimal; the DB is not designed to add a large number of headers at once
match self.chain.sync_block_headers(&bhs, tip, Options::NONE) {
Ok(_) => {}
Err(e) => {
warn!(
"add_headers in bulk is failed, will add one by one. Error: {}",
e
);
// apply one by one
for (hdr, peer) in headers_by_peer {
let tip = self
.chain
.header_head()
.expect("Header head must be always defined");

match self.chain.sync_block_headers(&hdr, tip, Options::NONE) {
Ok(_) => {}
Err(e) => return Err((peer, e)),
}
}
}
Err(e) => return Err((peer, e)),
}
}

Expand Down

0 comments on commit 5999748

Please sign in to comment.