Introduce a second decompose method with route ruin
reinterpretcat committed Oct 3, 2024
1 parent b4c8bd1 commit 1ec6c7e
Showing 5 changed files with 57 additions and 26 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -14,6 +14,7 @@ are already published. So, I stick to it for now.
* apply some minor code refactorings
* apply some performance optimizations
* refactor dbscan clustering api
* improve heuristic a bit

### Removed

51 changes: 38 additions & 13 deletions vrp-core/src/solver/heuristic.rs
@@ -551,22 +551,13 @@ mod dynamic {
"local_swap_star".to_string(),
10.,
),
// decompose search methods with different inner heuristic
(
Arc::new(DecomposeSearch::new(
Arc::new(WeightedHeuristicOperator::new(
vec![
create_default_inner_ruin_recreate(problem.clone(), environment.clone()),
create_default_local_search(environment.random.clone()),
],
vec![10, 1],
)),
(2, 4),
2,
SINGLE_HEURISTIC_QUOTA_LIMIT,
)),
"decompose_search".to_string(),
create_variable_search_decompose_search(problem.clone(), environment.clone()),
"decompose_search_var".to_string(),
25.,
),
(create_composite_decompose_search(problem, environment), "decompose_search_com".to_string(), 25.),
]
}

@@ -675,6 +666,40 @@ mod dynamic {
1,
))))
}

fn create_variable_search_decompose_search(
problem: Arc<Problem>,
environment: Arc<Environment>,
) -> TargetSearchOperator {
Arc::new(DecomposeSearch::new(
Arc::new(WeightedHeuristicOperator::new(
vec![
create_default_inner_ruin_recreate(problem.clone(), environment.clone()),
create_default_local_search(environment.random.clone()),
],
vec![10, 1],
)),
(2, 4),
2,
SINGLE_HEURISTIC_QUOTA_LIMIT,
))
}

fn create_composite_decompose_search(problem: Arc<Problem>, environment: Arc<Environment>) -> TargetSearchOperator {
let limits = RemovalLimits { removed_activities_range: (10..100), affected_routes_range: 1..1 };
let route_removal_operator =
Arc::new(RuinAndRecreate::new(Arc::new(RandomRouteRemoval::new(limits)), Arc::new(DummyRecreate)));

Arc::new(DecomposeSearch::new(
Arc::new(CompositeHeuristicOperator::new(vec![
(route_removal_operator, 1.),
(create_default_inner_ruin_recreate(problem.clone(), environment.clone()), 1.),
])),
(2, 4),
2,
SINGLE_HEURISTIC_QUOTA_LIMIT,
))
}
}

fn get_recreate_with_alternative_goal<T, F>(
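The variable-search decompose method keeps the previous 10:1 weighting between the inner ruin-and-recreate and local search operators. Below is a standalone, illustrative sketch of weight-proportional selection, assuming (not verified against vrp-core) that WeightedHeuristicOperator picks one inner operator per call with probability proportional to its weight; pick_weighted is a made-up helper for illustration, not part of the crate's API.

    // Standalone sketch, not the vrp-core implementation: picks an index with
    // probability proportional to `weights`, given a uniform sample `u` in [0, 1).
    fn pick_weighted(weights: &[u32], u: f64) -> usize {
        let total: u32 = weights.iter().sum();
        let target = u * total as f64;
        let mut acc = 0.0;
        for (idx, weight) in weights.iter().enumerate() {
            acc += *weight as f64;
            if target < acc {
                return idx;
            }
        }
        weights.len() - 1
    }

    fn main() {
        // With weights [10, 1], roughly 10 out of 11 calls select the inner
        // ruin-and-recreate (index 0); the rest select local search (index 1).
        assert_eq!(pick_weighted(&[10, 1], 0.5), 0);
        assert_eq!(pick_weighted(&[10, 1], 0.95), 1);
    }

The new composite variant additionally mixes in a route-removal ruin whose recreate step is a no-op (DummyRecreate), so a whole route can be ruined inside a decomposed partial solution.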
4 changes: 2 additions & 2 deletions vrp-core/src/solver/search/decompose_search.rs
@@ -13,8 +13,8 @@ use std::cmp::Ordering;
use std::collections::HashSet;
use std::iter::{empty, once};

/// A search operator which decomposes original solution into multiple partial solutions,
/// preforms search independently, and then merges partial solution back into one solution.
/// A search operator which decomposes an original solution into multiple partial solutions,
/// performs search independently, and then merges partial solutions back into one solution.
pub struct DecomposeSearch {
inner_search: TargetSearchOperator,
max_routes_range: (i32, i32),
9 changes: 9 additions & 0 deletions vrp-core/src/solver/search/recreate/mod.rs
@@ -162,3 +162,12 @@ impl<T: Recreate> Recreate for RecreateWithGoal<T> {
insertion_ctx
}
}

/// A recreate strategy which does nothing.
pub(crate) struct DummyRecreate;

impl Recreate for DummyRecreate {
fn run(&self, _: &RefinementContext, insertion_ctx: InsertionContext) -> InsertionContext {
insertion_ctx
}
}
18 changes: 7 additions & 11 deletions vrp-core/src/solver/search/utils/removal.rs
@@ -14,7 +14,6 @@ use std::sync::Arc;
pub struct JobRemovalTracker {
activities_left: i32,
routes_left: i32,
has_fully_removed_routes: bool,
affected_actors: HashSet<Arc<Actor>>,
removed_jobs: HashSet<Job>,
}
@@ -27,7 +26,6 @@ impl JobRemovalTracker {
.uniform_int(limits.removed_activities_range.start as i32, limits.removed_activities_range.end as i32),
routes_left: random
.uniform_int(limits.affected_routes_range.start as i32, limits.affected_routes_range.end as i32),
has_fully_removed_routes: false,
affected_actors: HashSet::default(),
removed_jobs: HashSet::default(),
}
@@ -99,23 +97,23 @@ impl JobRemovalTracker {
fn can_remove_full_route(&self, solution: &SolutionContext, route_idx: usize, random: &(dyn Random)) -> bool {
let route_ctx = solution.routes.get(route_idx).expect("invalid route index");

let route_activities = route_ctx.route().tour.job_activity_count();
if route_activities == 0 {
return false;
}

// check locked jobs
let has_locked_jobs =
!solution.locked.is_empty() && route_ctx.route().tour.jobs().any(|job| solution.locked.contains(job));
if has_locked_jobs {
return false;
}

if route_ctx.route().tour.job_activity_count() as i32 <= self.activities_left {
if route_activities as i32 <= self.activities_left {
return true;
}

// try at least once remove a route completely
if !self.has_fully_removed_routes {
return random.is_hit(1. / self.routes_left.max(1) as Float);
}

false
random.is_hit((self.activities_left as Float / route_activities as Float).min(1.))
}

fn remove_whole_route(&mut self, solution: &mut SolutionContext, route_idx: usize) {
@@ -135,8 +133,6 @@ impl JobRemovalTracker {

self.affected_actors.insert(actor);
self.routes_left = (self.routes_left - 1).max(0);

self.has_fully_removed_routes = true;
}

fn try_remove_part_route(
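The reworked can_remove_full_route drops the one-shot has_fully_removed_routes flag: a route with locked jobs or no activities is never removed, a route whose activity count fits the remaining removal budget is always removed, and anything larger is removed with a probability proportional to that budget. A minimal standalone sketch of the rule (plain Rust; the function name is made up for illustration, and the locked-jobs check is left out):

    // Probability of removing a whole route under the new rule.
    fn full_route_removal_probability(activities_left: i32, route_activities: usize) -> f64 {
        if route_activities == 0 {
            return 0.0; // empty routes are never removed by this rule
        }
        if route_activities as i32 <= activities_left {
            return 1.0; // the whole route fits into the remaining removal budget
        }
        // otherwise the chance shrinks as the route outgrows the budget
        (activities_left as f64 / route_activities as f64).min(1.0)
    }

    fn main() {
        // a budget of 30 activities covers a 10-activity route: always removed
        assert_eq!(full_route_removal_probability(30, 10), 1.0);
        // a budget of 5 against a 20-activity route: a 25% chance
        assert_eq!(full_route_removal_probability(5, 20), 0.25);
    }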
