Big refactoring and test of TF emulator
forefire committed Jul 25, 2024
1 parent efe6a04 commit a19038b
Showing 73 changed files with 1,728 additions and 2,586 deletions.
28 changes: 3 additions & 25 deletions README.md
@@ -43,7 +43,7 @@ apt install cmake -y

To install
- The C++ compiler
- [NetCDF Library](https://www.unidata.ucar.edu/software/netcdf/) and [NetCDF-C++ legacy](https://www.unidata.ucar.edu/downloads/netcdf/netcdf-cxx/index.jsp)
- [NetCDF Library](https://www.unidata.ucar.edu/software/netcdf/) and [NetCDF-C++ ](https://www.unidata.ucar.edu/downloads/netcdf/netcdf-cxx/index.jsp)
- [Cmake](https://cmake.org/) build tool

## 2. Build
@@ -95,30 +95,8 @@ The simulation result will be output in JSON format


### 4. Running with python

Install the requirements:
```
cd py3_tools
pip install -r requirements.txt
```

You can use the script `coord_to_ff.py` to run the simulation at a default location:

```
python coord_to_ff.py
```

To run at a chosen location, the script accepts latitude and longitude in the EPSG:4326 projection as inputs and reprojects them into EPSG:32632, the projection used by the Aullene landscape.
```
python coord_to_ff.py --lat 41.6 --lon 9.1
```

The resulting GeoJSON (geometry type Polygon) will be saved in the `/examples/aullene` folder.
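
For reference, the coordinate conversion itself boils down to a single transform. A minimal sketch, assuming the `pyproj` package (`coord_to_ff.py` may implement it differently):

```
from pyproj import Transformer

# EPSG:4326 (lat/lon) -> EPSG:32632 (UTM zone 32N, metres)
# always_xy=True means coordinates are passed in (lon, lat) order
transformer = Transformer.from_crs("EPSG:4326", "EPSG:32632", always_xy=True)
x, y = transformer.transform(9.1, 41.6)  # lon, lat of the Aullene example
print(x, y)
```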

## 4. Building the Python lib
The `/swig` folder contains an `SConstruct` file for Python bindings.

Requires numpy (and numpy.i), swig, and matplotlib for testing.
Check out [pyForeFire](https://github.com/forefireAPI/pyForeFire); it may be included directly in this repo in future releases.

## 5. Building with Docker
A sample Dockerfile can be used to build a Docker image with
2 changes: 1 addition & 1 deletion examples/aullene/aullene.ff
@@ -20,5 +20,5 @@ step[dt=199]
step[dt=2]
step[dt=12000]
print[./*count*-*ISOdate*.json]
save[]
#save[]
#print[]
10 changes: 5 additions & 5 deletions examples/aullene/aullene2.ff
@@ -4,11 +4,11 @@ setParameter[ForeFireDataDirectory=.]
setParameter[projection=EPSG:32632]
setParameter[fuelsTableFile=./fuels.ff]
setParameter[propagationModel=Rothermel]
setParameter[year=2022]
setParameter[month=10]
setParameter[day=27]
#setParameter[year=2022]
#setParameter[month=10]
#setParameter[day=27]
loadData[landscape.nc;2009-07-24T11:37:39Z]
startFire[loc=(516666.85406561225,4605385.548179354,0);t=0]
step[dt=12000s]
print[./*count*-*ISOdate*.ffgeojson]
print[]
#print[./*count*-*ISOdate*.ffgeojson]
print[]
238 changes: 238 additions & 0 deletions src/ANN.h
@@ -0,0 +1,238 @@
/*
Copyright (C) 2024 ForeFire Team, SPE, Université de Corse.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 US
*/

#ifndef ANN_H
#define ANN_H

#include <iostream>
#include <fstream>
#include <vector>
#include <cmath>
#include <algorithm>
#include <random>
#include <chrono>
#include <cstring>
#include <string>
#include <sstream>
#include <stdexcept>

// Activation functions (inline: this header may be included in several translation units)
inline float sigmoid(float x) {
    return 1.0f / (1.0f + std::exp(-x));
}

inline float relu(float x) {
    return std::max(0.0f, x);
}

inline float linear(float x) {
    return x;
}

// Activation function lookup by 4-character code ("RELU", "SIGM", "LINE")
inline float (*getActivationFunction(const std::string& name))(float) {
    if (name == "RELU") return relu;
    if (name == "SIGM") return sigmoid;
    if (name == "LINE") return linear;
    return nullptr;
}

// Layer structure: dense layer with weights[output][input] and biases[output]
struct Layer {
    std::vector<float> neurons;
    std::vector<std::vector<float>> weights;
    std::vector<float> biases;
    float (*activation)(float);

    // Constructor
    Layer(int inputSize, int outputSize, float (*actFunc)(float))
        : neurons(outputSize, 0.0f), weights(outputSize, std::vector<float>(inputSize)), biases(outputSize), activation(actFunc) {}

    // weightData is laid out input-major ([input][output], i.e. a flattened TF Dense kernel),
    // so it is transposed here into weights[output][input]
    void loadWeightsAndBiases(const std::vector<float>& weightData, const std::vector<float>& biasData) {
        for (size_t i = 0; i < weights.size(); ++i) {
            for (size_t j = 0; j < weights[0].size(); ++j) {
                weights[i][j] = weightData[j * weights.size() + i];
            }
        }
        std::copy(biasData.begin(), biasData.end(), biases.begin());
    }

    std::vector<float> feedforward(const std::vector<float>& inputs) {
        std::vector<float> output(neurons.size(), 0.0f);
        for (size_t i = 0; i < neurons.size(); ++i) {
            float neuronOutput = biases[i];
            for (size_t j = 0; j < inputs.size(); ++j) {
                neuronOutput += weights[i][j] * inputs[j];
            }
            output[i] = activation(neuronOutput);
        }
        return output;
    }
};

// Network structure
struct Network {
    std::vector<Layer> layers;
    std::vector<std::string> inputNames;
    std::vector<std::string> outputNames;

    // Split a comma-separated list of names
    std::vector<std::string> splitNames(const std::string& names) {
        std::vector<std::string> result;
        std::istringstream iss(names);
        std::string token;
        while (getline(iss, token, ',')) {
            result.push_back(token);
        }
        return result;
    }

    void loadFromFile(const char* filename) {
        std::ifstream file(filename, std::ios::binary);
        if (!file.is_open()) {
            throw std::runtime_error("Failed to open network structure file.");
        }

        char header[9] = {0};
        int numLayers;
        file.read(header, 8);
        file.read(reinterpret_cast<char*>(&numLayers), sizeof(int));

        if (std::string(header) != "FFANN001") {
            throw std::runtime_error("Invalid file format.");
        }

        int inputSize = 0; // This will be set based on the first layer's weight matrix dimensions

        for (int i = 0; i < numLayers; ++i) {
            char activation[5] = {0};
            int width, height;
            file.read(activation, 4);
            file.read(reinterpret_cast<char*>(&width), sizeof(int));
            file.read(reinterpret_cast<char*>(&height), sizeof(int));

            if (i == 0) {
                inputSize = width;
            }

            float (*actFunc)(float) = getActivationFunction(std::string(activation));
            if (!actFunc) {
                throw std::runtime_error("Unsupported activation function.");
            }

            Layer layer(inputSize, height, actFunc);
            std::vector<float> weightData(height * width);
            std::vector<float> biasData(height);
            file.read(reinterpret_cast<char*>(weightData.data()), height * width * sizeof(float));
            file.read(reinterpret_cast<char*>(biasData.data()), height * sizeof(float));

            layer.loadWeightsAndBiases(weightData, biasData);
            layers.push_back(layer);
            inputSize = height;
        }

        // Read the comma-separated input and output variable names, each preceded by its length
        int input_names_length, output_names_length;
        std::string nameBuffer;
        file.read(reinterpret_cast<char*>(&input_names_length), sizeof(int));
        nameBuffer.resize(input_names_length);
        file.read(&nameBuffer[0], input_names_length);
        inputNames = splitNames(nameBuffer);

        file.read(reinterpret_cast<char*>(&output_names_length), sizeof(int));
        nameBuffer.resize(output_names_length);
        file.read(&nameBuffer[0], output_names_length);
        outputNames = splitNames(nameBuffer);

        std::cout << "name sizes : " << input_names_length << " " << output_names_length << std::endl;

        printLayerInfo();
    }
    void printLayerInfo() {
        for (const auto& layer : layers) {
            std::cout << "Layer: Input Size = " << layer.weights[0].size()
                      << ", Output Size = " << layer.neurons.size()
                      << ", Activation Function = ";

            if (layer.activation == relu) {
                std::cout << "ReLU";
            } else if (layer.activation == sigmoid) {
                std::cout << "Sigmoid";
            } else {
                std::cout << "Linear";
            }

            std::cout << std::endl; // End the line after printing each layer's details
        }
    }

    std::vector<float> processInput(const std::vector<float>& input) {
        std::vector<float> result = input;
        for (auto& layer : layers) {
            result = layer.feedforward(result);
        }
        return result;
    }

    // Assumes this method processes only one input set and expects exactly one output
    bool processDirectInput(const std::vector<double>& inputs, double& output) {
        std::vector<float> currentOutput(inputs.begin(), inputs.end());

        for (auto& layer : layers) {
            std::vector<float> nextOutput(layer.neurons.size(), 0.0f);

            for (size_t i = 0; i < layer.neurons.size(); ++i) {
                float neuronOutput = layer.biases[i];
                for (size_t j = 0; j < currentOutput.size(); ++j) {
                    neuronOutput += layer.weights[i][j] * currentOutput[j];
                }
                nextOutput[i] = layer.activation(neuronOutput);
            }

            currentOutput = std::move(nextOutput);
        }

        // Assumes the final layer has exactly one output
        if (currentOutput.size() == 1) {
            output = static_cast<double>(currentOutput[0]);
            return true;
        }
        return false;
    }

    std::string toString() const {
        std::ostringstream ss;
        for (const auto& layer : layers) {
            ss << "Layer: Input Size = " << layer.weights[0].size() << ", Output Size = " << layer.neurons.size()
               << ", Activation Function = " << (layer.activation == relu ? "ReLU" :
                   layer.activation == sigmoid ? "Sigmoid" : "Linear") << "\n";
        }
        return ss.str();
    }
};

#endif // ANN_H
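
For reference, `loadFromFile` above implies a simple binary layout: an 8-byte `FFANN001` header, the layer count, then per layer a 4-character activation code, the width (inputs) and height (outputs), the float32 weights in [input][output] order, and the float32 biases, followed by the comma-separated input and output variable names, each preceded by its length. Below is a minimal exporter sketch in Python; the `write_ffann` helper name is hypothetical, and native little-endian 4-byte ints and float32 values are assumed.

```
import struct
import numpy as np

def write_ffann(path, layers, input_names, output_names):
    """layers: list of (kernel, bias, act) with kernel shaped (inputs, outputs)
    and act one of the 4-character codes understood by ANN.h: RELU, SIGM, LINE."""
    with open(path, "wb") as f:
        f.write(b"FFANN001")                               # 8-byte magic header
        f.write(struct.pack("<i", len(layers)))            # number of layers
        for kernel, bias, act in layers:
            kernel = np.asarray(kernel, dtype=np.float32)  # shape (inputs, outputs)
            bias = np.asarray(bias, dtype=np.float32)      # shape (outputs,)
            width, height = kernel.shape                   # width = inputs, height = outputs
            f.write(act.encode("ascii"))                   # exactly 4 characters
            f.write(struct.pack("<ii", width, height))
            f.write(kernel.tobytes(order="C"))             # [input][output] order, transposed back by loadWeightsAndBiases
            f.write(bias.tobytes())
        for names in (input_names, output_names):
            blob = ",".join(names).encode("ascii")         # comma-separated variable names
            f.write(struct.pack("<i", len(blob)) + blob)
```

A Keras `Dense` layer's `get_weights()` already returns the kernel with shape `(inputs, outputs)`, so it should be writable as-is.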

