Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Log assertions: add assertions for log errors in UDP tracker #1165

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/servers/udp/connection_cookie.rs
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ use std::ops::Range;
/// # Panics
///
/// It would panic if the range start is larger than its end.
#[instrument(err)]
#[instrument]
pub fn check(cookie: &Cookie, fingerprint: u64, valid_range: Range<f64>) -> Result<f64, Error> {
assert!(valid_range.start <= valid_range.end, "range start is larger than range end");

Expand Down
33 changes: 30 additions & 3 deletions src/servers/udp/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use super::server::banning::BanService;
use super::RawRequest;
use crate::core::{statistics, PeersWanted, Tracker};
use crate::servers::udp::error::Error;
use crate::servers::udp::peer_builder;
use crate::servers::udp::{peer_builder, UDP_TRACKER_LOG_TARGET};
use crate::shared::bit_torrent::common::MAX_SCRAPE_TORRENTS;
use crate::CurrentClock;

Expand Down Expand Up @@ -61,7 +61,9 @@ pub(crate) async fn handle_packet(
cookie_time_values: CookieTimeValues,
ban_service: Arc<RwLock<BanService>>,
) -> Response {
tracing::Span::current().record("request_id", Uuid::new_v4().to_string());
let request_id = Uuid::new_v4();

tracing::Span::current().record("request_id", request_id.to_string());
tracing::debug!("Handling Packets: {udp_request:?}");

let start_time = Instant::now();
Expand All @@ -84,6 +86,8 @@ pub(crate) async fn handle_packet(

handle_error(
udp_request.from,
local_addr,
request_id,
tracker,
cookie_time_values.valid_range.clone(),
&e,
Expand All @@ -92,7 +96,18 @@ pub(crate) async fn handle_packet(
.await
}
},
Err(e) => handle_error(udp_request.from, tracker, cookie_time_values.valid_range.clone(), &e, None).await,
Err(e) => {
handle_error(
udp_request.from,
local_addr,
request_id,
tracker,
cookie_time_values.valid_range.clone(),
&e,
None,
)
.await
}
};

let latency = start_time.elapsed();
Expand Down Expand Up @@ -344,13 +359,25 @@ pub async fn handle_scrape(
#[instrument(fields(transaction_id), skip(tracker), ret(level = Level::TRACE))]
async fn handle_error(
remote_addr: SocketAddr,
local_addr: SocketAddr,
request_id: Uuid,
tracker: &Tracker,
cookie_valid_range: Range<f64>,
e: &Error,
transaction_id: Option<TransactionId>,
) -> Response {
tracing::trace!("handle error");

match transaction_id {
Some(transaction_id) => {
let transaction_id = transaction_id.0.to_string();
tracing::error!(target: UDP_TRACKER_LOG_TARGET, error = %e, %remote_addr, %local_addr, %request_id, %transaction_id, "response error");
}
None => {
tracing::error!(target: UDP_TRACKER_LOG_TARGET, error = %e, %remote_addr, %local_addr, %request_id, "response error");
}
}

let e = if let Error::RequestParseError { request_parse_error } = e {
match request_parse_error {
RequestParseError::Sendable {
Expand Down
7 changes: 7 additions & 0 deletions tests/common/fixtures.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use aquatic_udp_protocol::TransactionId;
use bittorrent_primitives::info_hash::InfoHash;

#[allow(dead_code)]
Expand All @@ -20,3 +21,9 @@ pub fn random_info_hash() -> InfoHash {

InfoHash::from_bytes(&random_bytes)
}

/// Returns a random transaction id.
pub fn random_transaction_id() -> TransactionId {
let random_value = rand::Rng::gen::<i32>(&mut rand::thread_rng());
TransactionId::new(random_value)
}
53 changes: 42 additions & 11 deletions tests/servers/udp/contract.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,22 +118,30 @@ mod receiving_an_announce_request {
use torrust_tracker_configuration::DEFAULT_TIMEOUT;
use torrust_tracker_test_helpers::configuration;

use crate::common::logging;
use crate::common::fixtures::{random_info_hash, random_transaction_id};
use crate::common::logging::{self, logs_contains_a_line_with};
use crate::servers::udp::asserts::is_ipv4_announce_response;
use crate::servers::udp::contract::send_connection_request;
use crate::servers::udp::Started;

pub async fn assert_send_and_get_announce(tx_id: TransactionId, c_id: ConnectionId, client: &UdpTrackerClient) {
let response = send_and_get_announce(tx_id, c_id, client).await;
pub async fn assert_send_and_get_announce(
tx_id: TransactionId,
c_id: ConnectionId,
info_hash: bittorrent_primitives::info_hash::InfoHash,
client: &UdpTrackerClient,
) {
let response = send_and_get_announce(tx_id, c_id, info_hash, client).await;
assert!(is_ipv4_announce_response(&response));
}

pub async fn send_and_get_announce(
tx_id: TransactionId,
c_id: ConnectionId,
info_hash: bittorrent_primitives::info_hash::InfoHash,
client: &UdpTrackerClient,
) -> aquatic_udp_protocol::Response {
let announce_request = build_sample_announce_request(tx_id, c_id, client.client.socket.local_addr().unwrap().port());
let announce_request =
build_sample_announce_request(tx_id, c_id, client.client.socket.local_addr().unwrap().port(), info_hash);

match client.send(announce_request.into()).await {
Ok(_) => (),
Expand All @@ -146,12 +154,17 @@ mod receiving_an_announce_request {
}
}

fn build_sample_announce_request(tx_id: TransactionId, c_id: ConnectionId, port: u16) -> AnnounceRequest {
fn build_sample_announce_request(
tx_id: TransactionId,
c_id: ConnectionId,
port: u16,
info_hash: bittorrent_primitives::info_hash::InfoHash,
) -> AnnounceRequest {
AnnounceRequest {
connection_id: ConnectionId(c_id.0),
action_placeholder: AnnounceActionPlaceholder::default(),
transaction_id: tx_id,
info_hash: InfoHash([0u8; 20]),
info_hash: InfoHash(info_hash.0),
peer_id: PeerId([255u8; 20]),
bytes_downloaded: NumberOfBytes(0i64.into()),
bytes_uploaded: NumberOfBytes(0i64.into()),
Expand Down Expand Up @@ -179,7 +192,9 @@ mod receiving_an_announce_request {

let c_id = send_connection_request(tx_id, &client).await;

assert_send_and_get_announce(tx_id, c_id, &client).await;
let info_hash = random_info_hash();

assert_send_and_get_announce(tx_id, c_id, info_hash, &client).await;

env.stop().await;
}
Expand All @@ -199,9 +214,11 @@ mod receiving_an_announce_request {

let c_id = send_connection_request(tx_id, &client).await;

let info_hash = random_info_hash();

for x in 0..1000 {
tracing::info!("req no: {x}");
assert_send_and_get_announce(tx_id, c_id, &client).await;
assert_send_and_get_announce(tx_id, c_id, info_hash, &client).await;
}

env.stop().await;
Expand All @@ -218,25 +235,39 @@ mod receiving_an_announce_request {
Err(err) => panic!("{err}"),
};

let tx_id = TransactionId::new(123);

// The eleven first requests should be fine

let invalid_connection_id = ConnectionId::new(0); // Zero is one of the not normal values.

let info_hash = random_info_hash();

for x in 0..=10 {
tracing::info!("req no: {x}");
send_and_get_announce(tx_id, invalid_connection_id, &client).await;

let tx_id = random_transaction_id();

send_and_get_announce(tx_id, invalid_connection_id, info_hash, &client).await;

let transaction_id = tx_id.0.to_string();

assert!(
logs_contains_a_line_with(&["ERROR", "UDP TRACKER", &transaction_id.to_string()]),
"Expected logs to contain: ERROR ... UDP TRACKER ... transaction_id={transaction_id}"
);
}

// The twelfth request should be banned (timeout error)

let tx_id = random_transaction_id();

let announce_request = build_sample_announce_request(
tx_id,
invalid_connection_id,
client.client.socket.local_addr().unwrap().port(),
info_hash,
);

// This should return a timeout error
match client.send(announce_request.into()).await {
Ok(_) => (),
Err(err) => panic!("{err}"),
Expand Down
Loading