Merge branch 'jibeproject:master' into master
IrenaItova authored Aug 28, 2023
2 parents 182b0fd + 911d9bc commit 9be6755
Showing 26 changed files with 1,419 additions and 209 deletions.
146 changes: 146 additions & 0 deletions src/main/java/accessibility/InterventionCalculator.java
@@ -0,0 +1,146 @@
/*
* Copyright (C) Schweizerische Bundesbahnen SBB, 2018.
*/

package accessibility;

import accessibility.decay.DecayFunction;
import org.matsim.api.core.v01.Id;
import org.matsim.api.core.v01.IdMap;
import org.matsim.api.core.v01.network.Network;
import org.matsim.api.core.v01.network.Node;
import org.matsim.api.core.v01.population.Person;
import org.matsim.core.population.PopulationUtils;
import org.matsim.core.router.util.TravelDisutility;
import org.matsim.core.router.util.TravelTime;
import org.matsim.core.utils.misc.Counter;
import org.matsim.vehicles.Vehicle;
import resources.Properties;
import resources.Resources;
import routing.graph.LeastCostPathTree3;
import routing.graph.SpeedyGraph;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;


// Based on the skim matrix calculations from the MATSim SBB Extensions
public final class InterventionCalculator {

private final static Person PERSON = PopulationUtils.getFactory().createPerson(Id.create("thePerson", Person.class));
final SpeedyGraph routingGraph;
private final DecayFunction decayFunction;

public InterventionCalculator(Network routingNetwork, TravelTime travelTime, TravelDisutility travelDisutility,
Vehicle vehicle, DecayFunction decayFunction) {
this.routingGraph = new SpeedyGraph(routingNetwork,travelTime,travelDisutility,PERSON,vehicle);
this.decayFunction = decayFunction;
}


public Map<Id<Node>,Double> calculate(Set<Id<Node>> startNodes, Map<Id<Node>,Double> endNodes) {

int numberOfThreads = Resources.instance.getInt(Properties.NUMBER_OF_THREADS);

// prepare calculation
ConcurrentHashMap<Id<Node>,Double> accessibilityResults = new ConcurrentHashMap<>(startNodes.size());

// do calculation
ConcurrentLinkedQueue<Id<Node>> startNodesQueue = new ConcurrentLinkedQueue<>(startNodes);

Counter counter = new Counter("Calculating accessibility node ", " / " + startNodes.size());
Thread[] threads = new Thread[numberOfThreads];
for (int i = 0; i < numberOfThreads; i++) {
NodeWorker worker = new NodeWorker(startNodesQueue, endNodes, accessibilityResults, counter);
threads[i] = new Thread(worker, "Accessibility-" + i);
threads[i].start();
}

// wait until all threads have finished
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}

return Collections.unmodifiableMap(new HashMap<>(accessibilityResults));
}

public Map<Id<Node>,Double> calculateSingle(Set<Id<Node>> startNodes, Id<Node> newNode, Double wt) {

LeastCostPathTree3 lcpTreeFwd = new LeastCostPathTree3(routingGraph);
LeastCostPathTree3 lcpTreeRev = new LeastCostPathTree3(routingGraph);
LeastCostPathTree3.StopCriterion stopCriterion = decayFunction.getTreeStopCriterion();

lcpTreeFwd.calculate(newNode.index(),0.,stopCriterion,true);
lcpTreeRev.calculate(newNode.index(),0.,stopCriterion,false);

IdMap<Node,Double> result = new IdMap<>(Node.class);
for(Id<Node> node : startNodes) {
int toNodeIndex = node.index();
double dist = (lcpTreeFwd.getDistance(toNodeIndex) + lcpTreeRev.getDistance(toNodeIndex))/2;
double time = (lcpTreeFwd.getTime(toNodeIndex).orElse(Double.POSITIVE_INFINITY) +
lcpTreeRev.getTime(toNodeIndex).orElse(Double.POSITIVE_INFINITY))/2;
if(decayFunction.beyondCutoff(dist,time)) {
result.put(node,0.);
} else {
double cost = (lcpTreeFwd.getCost(toNodeIndex) + lcpTreeRev.getCost(toNodeIndex))/2;
result.put(node,decayFunction.getDecay(cost) * wt);
}

}
return result;
}

private class NodeWorker implements Runnable {
private final ConcurrentLinkedQueue<Id<Node>> startNodes;
private final Map<Id<Node>,Double> endNodes;
private final ConcurrentHashMap<Id<Node>,Double> accessibilityData;
private final Counter counter;

NodeWorker(ConcurrentLinkedQueue<Id<Node>> startNodes, Map<Id<Node>,Double> endNodes,
ConcurrentHashMap<Id<Node>,Double> results, Counter counter) {
this.startNodes = startNodes;
this.endNodes = endNodes;
this.accessibilityData = results;
this.counter = counter;
}

public void run() {
LeastCostPathTree3 lcpTreeFwd = new LeastCostPathTree3(routingGraph);
LeastCostPathTree3 lcpTreeRev = new LeastCostPathTree3(routingGraph);
LeastCostPathTree3.StopCriterion stopCriterion = decayFunction.getTreeStopCriterion();

while (true) {
Id<Node> fromNodeId = this.startNodes.poll();
if (fromNodeId == null) {
return;
}

this.counter.incCounter();
lcpTreeFwd.calculate(fromNodeId.index(),0.,stopCriterion,true);
lcpTreeRev.calculate(fromNodeId.index(),0.,stopCriterion,false);

double accessibility = 0.;

for (Map.Entry<Id<Node>, Double> e : this.endNodes.entrySet()) {
int toNodeIndex = e.getKey().index();
double dist = (lcpTreeFwd.getDistance(toNodeIndex) + lcpTreeRev.getDistance(toNodeIndex))/2;
double time = (lcpTreeFwd.getTime(toNodeIndex).orElse(Double.POSITIVE_INFINITY) +
lcpTreeRev.getTime(toNodeIndex).orElse(Double.POSITIVE_INFINITY))/2;
if(!decayFunction.beyondCutoff(dist,time)) {
double cost = (lcpTreeFwd.getCost(toNodeIndex) + lcpTreeRev.getCost(toNodeIndex))/2;
accessibility += decayFunction.getDecay(cost) * e.getValue();
}
}
this.accessibilityData.put(fromNodeId,accessibility);
}
}
}
}
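
For orientation, a minimal usage sketch of the new InterventionCalculator (not part of the commit). It reuses the configuration calls that appear in RunAnalysis further down; fullNetwork, networkBoundary, startNodes, endNodeWeights and proposedNode are hypothetical placeholders.

// Illustrative sketch only -- not part of this commit
String mode = AccessibilityResources.instance.getMode();
Network modeNetwork = NetworkUtils2.extractModeSpecificNetwork(fullNetwork, mode);
TravelTime tt = AccessibilityResources.instance.getTravelTime();
TravelDisutility td = AccessibilityResources.instance.getTravelDisutility();
Vehicle veh = AccessibilityResources.instance.getVehicle();
DecayFunction df = DecayFunctions.getFromProperties(modeNetwork, networkBoundary);

InterventionCalculator calc = new InterventionCalculator(modeNetwork, tt, td, veh, df);

// Accessibility of each start node, summing decayed weights over all destination nodes
Map<Id<Node>, Double> accessibility = calc.calculate(startNodes, endNodeWeights);

// Contribution of a single proposed destination node with weight 100
Map<Id<Node>, Double> delta = calc.calculateSingle(startNodes, proposedNode, 100.);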
45 changes: 39 additions & 6 deletions src/main/java/accessibility/LocationData.java
@@ -5,6 +5,8 @@
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.matsim.api.core.v01.Coord;
import org.matsim.api.core.v01.Id;
import org.matsim.api.core.v01.IdMap;
import org.matsim.api.core.v01.IdSet;
import org.matsim.api.core.v01.network.Network;
import org.matsim.api.core.v01.network.Node;
@@ -26,6 +28,7 @@ public class LocationData {
private final static String WEIGHT_VAR = "WEIGHT";

private final Map<String, List<Coord>> coords = new LinkedHashMap<>();
private final Map<String, IdSet<Node>> nodes = new LinkedHashMap<>();
private final Map<String, Double> weights = new LinkedHashMap<>();

public LocationData(String filename, Geometry boundary) throws IOException {
@@ -86,16 +89,46 @@ public Map<String, Double> getWeights() {
return Collections.unmodifiableMap(weights);
}

public Map<String, IdSet<Node>> getNodes(Network xy2lNetwork) {
Map<String, IdSet<Node>> idNodeMap = new LinkedHashMap<>();
public void estimateNetworkNodes(Network xy2lNetwork) {
for (Map.Entry<String, List<Coord>> e : coords.entrySet()) {
IdSet<Node> nodes = new IdSet<>(Node.class);
IdSet<Node> nodeIds = new IdSet<>(Node.class);
for (Coord coord : e.getValue()) {
nodes.add(NetworkUtils.getNearestLinkExactly(xy2lNetwork, coord).getToNode().getId());
nodeIds.add(NetworkUtils.getNearestLinkExactly(xy2lNetwork, coord).getToNode().getId());
}
idNodeMap.put(e.getKey(), nodes);
nodes.put(e.getKey(), nodeIds);
}
return Collections.unmodifiableMap(idNodeMap);
}

public Map<String, IdSet<Node>> getNodes() {
return Collections.unmodifiableMap(nodes);
}

public IdMap<Node,String> getNodeIdMap() {

IdMap<Node,String> idNodeMap = new IdMap<>(Node.class);
for (Map.Entry<String, IdSet<Node>> e : nodes.entrySet()) {
Iterator<Id<Node>> it = e.getValue().iterator();
Id<Node> nodeId = it.next();
if(it.hasNext()) {
throw new RuntimeException("Node maps possible only with one node per location!");
}
idNodeMap.put(nodeId,e.getKey());

}
return idNodeMap;
}

public IdMap<Node,Double> getNodeWeightMap() {
IdMap<Node,Double> nodeWeightMap = new IdMap<>(Node.class);
for(Map.Entry<String, IdSet<Node>> e : nodes.entrySet()) {
Iterator<Id<Node>> it = e.getValue().iterator();
Id<Node> nodeId = it.next();
if(it.hasNext()) {
throw new RuntimeException("Node maps possible only with one node per location!");
}
nodeWeightMap.put(nodeId,weights.get(e.getKey()));
}
return nodeWeightMap;
}

private static int findPositionInArray (String string, String[] array) {
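For orientation, the old getNodes(Network) accessor becomes a two-step pattern: estimateNetworkNodes(...) snaps each location to the network once, and getNodes()/getNodeIdMap()/getNodeWeightMap() read the cached result. A minimal sketch (not part of the commit), following the call sequence used in RunAnalysis below; endLocationsFilename, networkBoundary and network are placeholders:

LocationData endData = new LocationData(endLocationsFilename, networkBoundary);
endData.estimateNetworkNodes(network);                     // snap each location to the to-node of its nearest link
Map<String, IdSet<Node>> endNodes = endData.getNodes();    // location ID -> node IDs
Map<String, Double> endWeights = endData.getWeights();     // location ID -> weight

// New reverse lookups; both throw a RuntimeException unless each location maps to exactly one node
IdMap<Node, String> nodeToLocation = endData.getNodeIdMap();
IdMap<Node, Double> nodeToWeight = endData.getNodeWeightMap();
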
@@ -18,16 +18,14 @@
import resources.Resources;

import org.apache.log4j.Logger;
import trads.PercentileCalculator;
import trip.Purpose;

import java.io.IOException;
import java.util.Map;
import java.util.Set;

public class RunAccessibility {
public class RunAnalysis {

public static final Logger log = Logger.getLogger(RunAccessibility.class);
public static final Logger log = Logger.getLogger(RunAnalysis.class);
private static Network fullNetwork;
private static Geometry regionBoundary;
private static Geometry networkBoundary;
@@ -86,7 +84,7 @@ private static void runAnalysis(String propertiesFilepath) throws IOException {
FeatureData features = new FeatureData(inputFilename);

// Parameters
DecayFunction df = getDecayFunctionFromProperties();
DecayFunction df = DecayFunctions.getFromProperties(network,networkBoundary);
boolean fwd = AccessibilityResources.instance.fwdCalculation();

// Checks on whether to perform ANY calculations
@@ -103,7 +101,8 @@ private static void runAnalysis(String propertiesFilepath) throws IOException {
return;
}
LocationData endData = new LocationData(endLocationsFilename,networkBoundary);
Map<String, IdSet<Node>> endNodes = endData.getNodes(network);
endData.estimateNetworkNodes(network);
Map<String, IdSet<Node>> endNodes = endData.getNodes();
Map<String, Double> endWeights = endData.getWeights();

// Accessibility calculation on NODES (if using polygons or node output requested)
@@ -140,75 +139,4 @@ private static void runAnalysis(String propertiesFilepath) throws IOException {
GisUtils.writeFeaturesToGpkg(features.getCollection(), features.getDescription() + "_result", outputFilename);
}
}

public static DecayFunction getDecayFunctionFromProperties() throws IOException {

// Decay function
String decayType = AccessibilityResources.instance.getString(AccessibilityProperties.DECAY_FUNCTION);
double cutoffTime = AccessibilityResources.instance.getDouble(AccessibilityProperties.CUTOFF_TIME);
double cutoffDist = AccessibilityResources.instance.getDouble(AccessibilityProperties.CUTOFF_DISTANCE);

if(decayType == null) {
log.warn("No decay function type specified.");
return null;
} else if (decayType.equalsIgnoreCase("exponential")) {
double beta = AccessibilityResources.instance.getDouble(AccessibilityProperties.BETA);
// Estimate from trads if beta not given
if(Double.isNaN(beta)) {
beta = estimateExpBetaFromTRADS();
}
log.info("Initialising exponential decay function with the following parameters:" +
"\nBeta: " + beta +
"\nTime cutoff (seconds): " + cutoffTime +
"\nDistance cutoff (meters): " + cutoffDist);
return new Exponential(beta,cutoffTime,cutoffDist);
} else if (decayType.equalsIgnoreCase("power")) {
double a = AccessibilityResources.instance.getDouble(AccessibilityProperties.A);
log.info("Initialising power decay function with the following parameters:" +
"\na: " + a +
"\nTime cutoff (seconds): " + cutoffTime +
"\nDistance cutoff (meters): " + cutoffDist);
return new Power(a,cutoffTime,cutoffDist);
} else if (decayType.equalsIgnoreCase("cumulative")) {
log.info("Initialising cumulative decay function with the following parameters:" +
"\nTime cutoff (seconds): " + cutoffTime +
"\nDistance cutoff (meters): " + cutoffDist);
return new Cumulative(cutoffTime, cutoffDist);
} else if (decayType.equalsIgnoreCase("gaussian")) {
double v = AccessibilityResources.instance.getDouble(AccessibilityProperties.V);
log.info("Initialising gaussian decay function with the following parameters:" +
"\nv: " + v +
"\nTime cutoff (seconds): " + cutoffTime +
"\nDistance cutoff (meters): " + cutoffDist);
return new Gaussian(v,cutoffTime,cutoffDist);
} else if (decayType.equalsIgnoreCase("cumulative gaussian")) {
double a = AccessibilityResources.instance.getDouble(AccessibilityProperties.A);
double v = AccessibilityResources.instance.getDouble(AccessibilityProperties.V);
log.info("Initialising cumulative gaussian decay function with the following parameters:" +
"\na: " + a +
"\nv: " + v +
"\nTime cutoff (seconds): " + cutoffTime +
"\nDistance cutoff (meters): " + cutoffDist);
return new CumulativeGaussian(a,v,cutoffTime,cutoffDist);
} else {
log.warn("Do not recognise decay function type \"" + decayType + "\"");
return null;
}
}

public static double estimateExpBetaFromTRADS() throws IOException {

String mode = AccessibilityResources.instance.getMode();
TravelTime tt = AccessibilityResources.instance.getTravelTime();
Vehicle veh = AccessibilityResources.instance.getVehicle();
TravelDisutility td = AccessibilityResources.instance.getTravelDisutility();
Purpose.PairList includedPurposePairs = AccessibilityResources.instance.getPurposePairs();

Network network = NetworkUtils2.extractModeSpecificNetwork(fullNetwork,mode);

log.info("Estimating exponential decay function using TRADS survey");
String outputCsv = AccessibilityResources.instance.getString(AccessibilityProperties.TRADS_OUTPUT_CSV);
return PercentileCalculator.estimateBeta(mode,veh,tt,td,includedPurposePairs,
network,network,networkBoundary,outputCsv);
}
}
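
For orientation, the decay-function setup removed above is centralised behind DecayFunctions.getFromProperties(network, networkBoundary). A minimal sketch of how a configured decay function is applied (not part of the commit), mirroring the pattern in InterventionCalculator.NodeWorker; the exponential form exp(-beta * cost) is the standard one the name suggests rather than confirmed from this diff, and dist, time, cost and weight are hypothetical local values:

DecayFunction df = new Exponential(0.01, 1800., 5000.);   // beta, time cutoff (s), distance cutoff (m) -- example values only
double contribution;
if (df.beyondCutoff(dist, time)) {                        // beyond the time/distance cutoffs: no contribution
    contribution = 0.;
} else {
    contribution = df.getDecay(cost) * weight;            // presumably ~ exp(-beta * cost) * weight
}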