Commit f1b65336 authored by René Schöne

Rename and move model.Model in api to EncogModel.

parent d415c8a8
EraserStarter.java
@@ -8,7 +8,7 @@ import de.tudresden.inf.st.eraser.feedbackloop.api.Analyze;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Execute;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Plan;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.feedbackloop.execute.ExecuteImpl;
 import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerHelper;
 import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerImpl;
@@ -142,7 +142,7 @@ public class EraserStarter {
   // Model preference = learner.getTrainedModel(settings.preference.realURL(), settings.preference.id);
   logger.debug("Loading of {} was {}", settings.preference.realURL(), loadingSuccessful ? "successful" : "failed");
   if (loadingSuccessful) {
-    Model preference = learner.getTrainedModel(settings.preference.id);
+    EncogModel preference = learner.getTrainedModel(settings.preference.id);
     NeuralNetworkRoot neuralNetwork = LearnerHelper.transform(preference);
     if (neuralNetwork == null) {
       logger.error("Could not create preference model, see possible previous errors.");

Model.java → EncogModel.java
-package de.tudresden.inf.st.eraser.feedbackloop.api.model;
+package de.tudresden.inf.st.eraser.feedbackloop.api;
 import lombok.Getter;
 import lombok.Setter;
@@ -18,7 +18,7 @@ import java.util.Objects;
  * */
 @Getter
 @Setter
-public class Model {
+public class EncogModel {
   /**
    * todo
    */
@@ -26,7 +26,7 @@ public class Model {
   private List<Double> weights;
   private List<Layer> layers;
-  public Model(String model) {
+  public EncogModel(String model) {
     modelType = model;
   }

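For orientation, a minimal sketch of how this data holder is filled after the rename, mirroring fillModel in LearnerImpl further down; the weight values and the empty layer list are illustrative assumptions, not taken from this commit:

    // Sketch: populate the renamed EncogModel via its Lombok-generated setters.
    // Uses java.util.Arrays and java.util.ArrayList; Layer is the type referenced
    // by getLayers()/setLayers() in this diff. The values are made up.
    EncogModel encogModel = new EncogModel("NN");           // model type tag, as in fillModel
    encogModel.setWeights(Arrays.asList(0.5, -0.3, 0.8));   // flat list of connection weights
    encogModel.setLayers(new ArrayList<Layer>());           // input, hidden and output layer descriptions
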
Learner.java
@@ -7,7 +7,6 @@ import java.util.List;
 import org.encog.util.arrayutil.NormalizedField;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
 import de.tudresden.inf.st.eraser.jastadd.model.Root;
 /**
@@ -117,10 +116,10 @@ public interface Learner {
   * @param modelID - ID of the model of which information is requested from
   * @return Model - Object that contains the information of the requested model
   * */
-  Model getTrainedModel(int modelID);
+  EncogModel getTrainedModel(int modelID);
   @Deprecated
-  Model getTrainedModel(URL url, int modelID);
+  EncogModel getTrainedModel(URL url, int modelID);
   /**
    *

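Callers of the interface now receive the renamed type; a minimal sketch, assuming a Learner implementation (such as LearnerImpl) has already trained a model and that id 1 refers to it:

    // Sketch: retrieve a trained model through the renamed return type.
    // 'learner' and the model id 1 are assumptions for illustration.
    EncogModel trained = learner.getTrainedModel(1);
    List<Double> weights = trained.getWeights();  // Lombok-generated getter on EncogModel
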
LearnerHelper.java
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.jastadd.model.*;
 import org.apache.commons.math3.stat.StatUtils;
 import org.apache.logging.log4j.LogManager;
@@ -17,7 +17,7 @@ import java.util.List;
 import java.util.stream.Collectors;
 /**
- * Transformation of a {@link Model} into a {@link NeuralNetworkRoot}.
+ * Transformation of a {@link EncogModel} into a {@link NeuralNetworkRoot}.
  *
  * @author rschoene - Initial contribution
  */
@@ -30,14 +30,14 @@ public class LearnerHelper {
   private static DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
   private static DoubleArrayDoubleFunction function_one = inputs -> 1.0;
-  public static NeuralNetworkRoot transform(Model model) {
+  public static NeuralNetworkRoot transform(EncogModel encogModel) {
     NeuralNetworkRoot result = NeuralNetworkRoot.createEmpty();
-    List<Double> weights = model.getWeights();
+    List<Double> weights = encogModel.getWeights();
     logger.debug("Got {} weights", weights.size());
     List<List<Neuron>> allNeurons = new ArrayList<>();
     // inputs
-    Layer inputLayer = model.getInputLayer();
+    Layer inputLayer = encogModel.getInputLayer();
     reportLayer("input", inputLayer);
     List<Neuron> inputNeurons = new ArrayList<>();
     for (int i = 0; i < nonBiasNeuronCount(inputLayer); ++i) {
@@ -50,7 +50,7 @@ public class LearnerHelper {
     // hidden layer
     List<Neuron> currentNeurons;
-    for (Layer hiddenLayer : model.getHiddenLayers()) {
+    for (Layer hiddenLayer : encogModel.getHiddenLayers()) {
       reportLayer("one hidden", hiddenLayer);
       currentNeurons = new ArrayList<>();
       allNeurons.add(currentNeurons);
@@ -65,7 +65,7 @@ public class LearnerHelper {
     // output layer
     OutputLayer outputLayer = new OutputLayer();
-    Layer modelOutputLayer = model.getOutputLayer();
+    Layer modelOutputLayer = encogModel.getOutputLayer();
     reportLayer("output", modelOutputLayer);
     List<Neuron> outputNeurons = new ArrayList<>();
     for (int i = 0; i < nonBiasNeuronCount(modelOutputLayer); ++i) {

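Putting the renamed pieces together, roughly as EraserStarter above and Main below do; the model id is again an assumed example value:

    // Sketch: convert the EncogModel returned by the Learner into the
    // JastAdd neural network representation; 'learner' and id 1 are assumptions.
    EncogModel trained = learner.getTrainedModel(1);
    NeuralNetworkRoot neuralNetwork = LearnerHelper.transform(trained);
    if (neuralNetwork == null) {
        logger.error("Could not create preference model, see possible previous errors.");
    }
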
LearnerImpl.java
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
 import de.tudresden.inf.st.eraser.jastadd.model.Root;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -170,17 +170,17 @@ public class LearnerImpl implements Learner {
   @Override
-  public Model getTrainedModel(int modelID) {
+  public EncogModel getTrainedModel(int modelID) {
     return fillModel(modelID);
   }
   @Override
-  public Model getTrainedModel(URL url, int modelID) {
+  public EncogModel getTrainedModel(URL url, int modelID) {
     return fillModel(modelID);
   }
-  private Model fillModel(int modelID) {
-    Model model = new Model("NN");
+  private EncogModel fillModel(int modelID) {
+    EncogModel encogModel = new EncogModel("NN");
     BasicNetwork nn = models.get(modelID).getNetwork();
     ArrayList<Double> weightsList = new ArrayList<>();
@@ -191,7 +191,7 @@ public class LearnerImpl implements Learner {
       weightsList.add(Double.valueOf(split[i]));
     }
-    model.setWeights(weightsList);
+    encogModel.setWeights(weightsList);
     // do not use getLayers() because it is not restored immediately on load from file
     FlatNetwork flat = nn.getFlat();
@@ -205,9 +205,9 @@ public class LearnerImpl implements Learner {
       layers.add(0, l);
     }
-    model.setLayers(layers);
-    return model;
+    encogModel.setLayers(layers);
+    return encogModel;
   }

Main.java
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.jastadd.model.*;
 import org.apache.commons.math3.stat.StatUtils;
 import org.apache.logging.log4j.LogManager;
@@ -68,10 +68,10 @@ public class Main {
     NeuralNetworkRoot eraserModel = LearnerHelper.transform(learner.getTrainedModel(1));
   }
-  private static void printModel(Model model) {
-    logger.info("Model Type is: " + model.getModelType());
-    logger.info("Model Weights are: " + model.getWeights());
-    logger.info("Model layers are: " + model.getLayers());
+  private static void printModel(EncogModel encogModel) {
+    logger.info("Model Type is: " + encogModel.getModelType());
+    logger.info("Model Weights are: " + encogModel.getWeights());
+    logger.info("Model layers are: " + encogModel.getLayers());
     // logger.info("Model input normal neutrons: " + model.getInputLayerNumber());
     // logger.info("Model input bias neutron: " + model.getInputBias());
     // logger.info("Model hidden normal neutrons: " + model.gethiddenLayerNumber());
