
Commit

Merge pull request #158 from Samyssmile/feat/fractality
Feat/fractality
Samyssmile authored Jan 9, 2024
2 parents dc40661 + 3d94631 commit 51b6788
Showing 34 changed files with 2,068 additions and 954 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -47,4 +47,5 @@ gradle.properties
!**/src/main/**/gradle.properties

/benchmark-data/augmentation-benchmark-images/*
-*.png
+*.png
+*.edux
3 changes: 0 additions & 3 deletions example/src/main/java/de/example/benchmark/Benchmark.java
@@ -3,9 +3,6 @@
import de.edux.api.Classifier;
import de.edux.data.provider.DataProcessor;
import de.edux.data.reader.CSVIDataReader;
-import de.edux.functions.activation.ActivationFunction;
-import de.edux.functions.initialization.Initialization;
-import de.edux.functions.loss.LossFunction;
import de.edux.ml.decisiontree.DecisionTree;
import de.edux.ml.knn.KnnClassifier;
import de.edux.ml.randomforest.RandomForest;
22 changes: 12 additions & 10 deletions example/src/main/java/de/example/mlp/MlpExampleOnMNIST.java
@@ -5,9 +5,9 @@
import de.edux.ml.mlp.core.network.layers.DenseLayer;
import de.edux.ml.mlp.core.network.layers.ReLuLayer;
import de.edux.ml.mlp.core.network.layers.SoftmaxLayer;
-import de.edux.ml.mlp.core.network.loader.image.ImageLoader;
import de.edux.ml.mlp.core.network.loader.Loader;
import de.edux.ml.mlp.core.network.loader.MetaData;
+import de.edux.ml.mlp.core.network.loader.mnist.MnistLoader;
import java.io.File;

public class MlpExampleOnMNIST {
@@ -47,25 +47,27 @@ public static void main(String[] args) {

int batchSize = 100;
ExecutionMode singleThread = ExecutionMode.SINGLE_THREAD;
-int epochs = 5;
+int epochs = 100;
float initialLearningRate = 0.1f;
-float finalLearningRate = 0.001f;
+float finalLearningRate = 0.0001f;

-Loader trainLoader = new ImageLoader(trainImages, trainLabels, batchSize);
-Loader testLoader = new ImageLoader(testImages, testLabels, batchSize);
+Loader trainLoader = new MnistLoader(trainImages, trainLabels, batchSize);
+Loader testLoader = new MnistLoader(testImages, testLabels, batchSize);

MetaData trainMetaData = trainLoader.open();
int inputSize = trainMetaData.getInputSize();
-int outputSize = trainMetaData.getExpectedSize();
+int outputSize = trainMetaData.getNumberOfClasses();
trainLoader.close();

// Training from scratch
new NetworkBuilder()
-.addLayer(new DenseLayer(inputSize, 128))
+.addLayer(new DenseLayer(inputSize, 256))
.addLayer(new ReLuLayer())
-.addLayer(new DenseLayer(128, 128))
+.addLayer(new DenseLayer(256, 256))
.addLayer(new ReLuLayer())
-.addLayer(new DenseLayer(128, outputSize))
+.addLayer(new DenseLayer(256, 256))
+.addLayer(new ReLuLayer())
+.addLayer(new DenseLayer(256, outputSize))
.addLayer(new SoftmaxLayer())
.withBatchSize(batchSize)
.withLearningRates(initialLearningRate, finalLearningRate)
@@ -80,7 +82,7 @@ public static void main(String[] args) {
new NetworkBuilder()
.withExecutionMode(singleThread)
.withEpochs(5)
-.withLearningRates(0.01f, 0.001f)
+.withLearningRates(0.001f, 0.001f)
.loadModel("mnist_trained.edux")
.fit(trainLoader, testLoader);
}
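The fine-tuning block above is worth a second look: the restart now uses equal initial and final learning rates (0.001f), so the rate stays constant while the saved model trains for five more epochs. A minimal sketch of that restart path, assuming trainLoader and testLoader are the MnistLoader instances created earlier; every call below appears in this diff:

// Fine-tuning sketch (assumptions noted above): restore the serialized model
// and continue training at a constant, low learning rate.
new NetworkBuilder()
    .withExecutionMode(ExecutionMode.SINGLE_THREAD)
    .withEpochs(5)
    .withLearningRates(0.001f, 0.001f) // initial == final: no decay while fine-tuning
    .loadModel("mnist_trained.edux")   // file written by the from-scratch run
    .fit(trainLoader, testLoader);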
@@ -52,7 +52,6 @@ public void readBatchOfImages() throws Exception {
}
}

-// Expected median
double[] numericValues =
Arrays.stream(largeDataset)
.filter(s -> !s.isBlank())
@@ -44,12 +44,13 @@ private boolean isNumeric(String value) {
return value.matches("-?\\d+(\\.\\d+)?") || value.isBlank();
}

-double calculateMedian(String[] datasetColumn) {
-  double[] filteredDatasetColumnInNumbers = Arrays.stream(datasetColumn)
-      .filter(value -> !value.isBlank())
-      .mapToDouble(Double::parseDouble)
-      .sorted()
-      .toArray();
+public double calculateMedian(String[] datasetColumn) {
+  double[] filteredDatasetColumnInNumbers =
+      Arrays.stream(datasetColumn)
+          .filter(value -> !value.isBlank())
+          .mapToDouble(Double::parseDouble)
+          .sorted()
+          .toArray();
if (filteredDatasetColumnInNumbers.length % 2 == 0) {
Double upper = filteredDatasetColumnInNumbers[filteredDatasetColumnInNumbers.length / 2];
Double lower =
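calculateMedian is now public and its stream pipeline was reflowed; the behavior is unchanged: blank entries are dropped, the remaining strings are parsed and sorted, and an even-length result averages the two middle values. A small usage sketch; the class name DataUtils is a hypothetical stand-in for whatever class declares the method:

// Hypothetical caller of the now-public calculateMedian.
String[] column = {"3.0", "", "1.0", "4.0", "2.0"}; // the blank entry is skipped
double median = new DataUtils().calculateMedian(column);
// Non-blank values sorted: 1.0, 2.0, 3.0, 4.0 -> even count, so (2.0 + 3.0) / 2 = 2.5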
78 changes: 0 additions & 78 deletions lib/src/main/java/de/edux/ml/mlp/core/network/BatchResult.java

This file was deleted.

16 changes: 5 additions & 11 deletions lib/src/main/java/de/edux/ml/mlp/core/network/Engine.java
@@ -1,10 +1,8 @@
package de.edux.ml.mlp.core.network;

-import de.edux.api.Classifier;
import de.edux.ml.mlp.core.network.loss.LossFunction;
import de.edux.ml.mlp.core.network.loss.LossFunctions;
import de.edux.ml.mlp.core.tensor.Matrix;
-import de.edux.ml.mlp.core.transformer.Transform;
import de.edux.ml.mlp.exceptions.UnsupportedLossFunction;
import java.io.Serializable;
import java.util.LinkedList;
@@ -14,23 +12,19 @@ public class Engine implements Layer, Serializable {
private static final long serialVersionUID = 1L;
private final LinkedList<Double> lossHistory = new LinkedList<>();
private final LinkedList<Double> accuracyHistory = new LinkedList<>();
-private final LinkedList<Transform> transforms = new LinkedList<>();
-private final LinkedList<Matrix> weights = new LinkedList<>();
-private final LinkedList<Matrix> biases = new LinkedList<>();

private final LinkedList<Layer> layers = new LinkedList<>();

private final LossFunction lossFunction = LossFunction.CROSS_ENTROPY;

private transient RunningAverages runningAverages;
+private int batchSize;

public Engine(int batchSize) {
this.batchSize = batchSize;
initAverageMetrics();
}

-private int batchSize;

@Override
public Matrix backwardLayerBased(Matrix error, float learningRate) {
for (int i = layers.size() - 1; i >= 0; i--) {
@@ -108,10 +102,6 @@ public LinkedList<Double> getAccuracyHistory() {
return accuracyHistory;
}

-public void setBatchSize(int batchSize) {
-  this.batchSize = batchSize;
-}

@Override
public void updateWeightsAndBias() {
for (Layer layer : layers) {
@@ -122,4 +112,8 @@ public void updateWeightsAndBias() {
public int getBatchSize() {
return batchSize;
}

+public void setBatchSize(int batchSize) {
+  this.batchSize = batchSize;
+}
}
34 changes: 17 additions & 17 deletions lib/src/main/java/de/edux/ml/mlp/core/network/NeuralNetwork.java
@@ -1,6 +1,5 @@
package de.edux.ml.mlp.core.network;

-import de.edux.api.Classifier;
import de.edux.ml.mlp.core.network.loader.BatchData;
import de.edux.ml.mlp.core.network.loader.Loader;
import de.edux.ml.mlp.core.network.loader.MetaData;
@@ -27,6 +26,21 @@ public class NeuralNetwork implements Serializable {
engine = new Engine(batchSize);
}

+public static NeuralNetwork loadModel(String fileName) {
+  NeuralNetwork model = null;
+  File file = new File(fileName);
+  if (!file.exists()) {
+    return null;
+  }
+  try (var ds = new ObjectInputStream(new FileInputStream(file))) {
+    model = (NeuralNetwork) ds.readObject();
+  } catch (IOException | ClassNotFoundException e) {
+    e.printStackTrace();
+  }
+  log.info("Model loaded from {}", file.getAbsolutePath());
+  return model;
+}

public void setLearningRates(float initialLearningRate, float finalLearningRate) {
this.initialLearningRate = initialLearningRate;
this.finalLearningRate = finalLearningRate;
@@ -64,7 +78,7 @@ private Matrix runBatch(Loader loader, boolean trainingMode) {
BatchData batchData = loader.readBatch();
int itemsRead = metaData.getItemsRead();
int inputSize = metaData.getInputSize();
-int expectedSize = metaData.getExpectedSize();
+int expectedSize = metaData.getNumberOfClasses();

Matrix input = new Matrix(inputSize, itemsRead, batchData.getInputBatch());
Matrix expected = new Matrix(expectedSize, itemsRead, batchData.getExpectedBatch());
@@ -111,6 +125,7 @@ private LinkedList<Future<Matrix>> createBatchTasks(Loader loader, boolean trainingMode) {
for (int i = 0; i < numberBatches; i++) {
batches.add(executor.submit(() -> runBatch(loader, trainingMode)));
}
+loader.reset();

executor.shutdown();

@@ -149,21 +164,6 @@ public boolean saveModel(String fileName) {
return true;
}

-public static NeuralNetwork loadModel(String fileName) {
-  NeuralNetwork model = null;
-  File file = new File(fileName);
-  if (!file.exists()) {
-    return null;
-  }
-  try (var ds = new ObjectInputStream(new FileInputStream(file))) {
-    model = (NeuralNetwork) ds.readObject();
-  } catch (IOException | ClassNotFoundException e) {
-    e.printStackTrace();
-  }
-  log.info("Model loaded from {}", file.getAbsolutePath());
-  return model;
-}

public double[] predict(Matrix input) {
return engine.forwardLayerbased(input).getData();
}
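loadModel only moved above the instance methods; its body is unchanged. It deserializes the network with an ObjectInputStream and returns null when the file does not exist. Combined with saveModel (whose signature appears in the hunk header above) and predict, a round trip might look like this sketch; only those three methods are taken from the diff, while network and inputMatrix are assumed to exist:

// Round-trip sketch: persist a trained NeuralNetwork, restore it, run inference.
if (network.saveModel("mnist_trained.edux")) { // returns true on success
  NeuralNetwork restored = NeuralNetwork.loadModel("mnist_trained.edux");
  if (restored != null) { // null means the file was missing
    // predict expects a Matrix shaped (inputSize rows x batch columns), as in runBatch
    double[] probabilities = restored.predict(inputMatrix);
  }
}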
lib/src/main/java/de/edux/ml/mlp/core/network/layers/DenseLayer.java
@@ -6,9 +6,9 @@
import java.util.concurrent.atomic.AtomicReference;

public class DenseLayer implements Layer {
-private final Random random = new Random();
private AtomicReference<Matrix> weights;
private AtomicReference<Matrix> bias;
+private final Random random = new Random();
private Matrix lastInput;

public DenseLayer(int inputSize, int outputSize) {
Expand Down Expand Up @@ -60,6 +60,6 @@ public Matrix backwardLayerBased(Matrix error, float learningRate) {

@Override
public String toString() {
return "DenseLayer in: " + weights.get().getCols() + " x out: " + weights.get().getRows();
return "Dense in: " + weights.get().getCols() + " x out: " + weights.get().getRows();
}
}
