diff --git a/eraser-base/src/main/jastadd/Imports.jadd b/eraser-base/src/main/jastadd/Imports.jadd
index 1c1a2e648f78bb681a54f23efa80e5febb59f013..62c746716028bef2ec86cdac52a91f14f4a6d391 100644
--- a/eraser-base/src/main/jastadd/Imports.jadd
+++ b/eraser-base/src/main/jastadd/Imports.jadd
@@ -1,5 +1,6 @@
 import java.util.*;
 import de.tudresden.inf.st.eraser.util.MemberPrinter;
+import de.tudresden.inf.st.eraser.util.JavaUtils;
 
 aspect Imports {
 
diff --git a/eraser-base/src/main/jastadd/Location.jrag b/eraser-base/src/main/jastadd/Location.jrag
new file mode 100644
index 0000000000000000000000000000000000000000..b61b260c998e15a89712e00f3e2c2a44eb84759b
--- /dev/null
+++ b/eraser-base/src/main/jastadd/Location.jrag
@@ -0,0 +1,10 @@
+aspect Location {
+  syn Optional<Location> Item.myLocation() {
+    if (this.hasLocation()) {
+      return Optional.of(this.getLocation());
+    } else {
+      return JavaUtils.ifPresentOrElseReturn(linkedThing(),
+          thing -> thing.hasLocation() ? Optional.of(thing.getLocation()) : Optional.empty(), () -> Optional.empty());
+    }
+  }
+}
diff --git a/eraser-base/src/main/jastadd/Location.relast b/eraser-base/src/main/jastadd/Location.relast
new file mode 100644
index 0000000000000000000000000000000000000000..3cdf199776adc6eaa575b31f867a16bae891e8d5
--- /dev/null
+++ b/eraser-base/src/main/jastadd/Location.relast
@@ -0,0 +1,3 @@
+Location ::= <Label:String> SubLocation:Location ;
+rel Location.Thing* <-> Thing.Location? ;
+rel Location.Item* <-> Item.Location? ;
diff --git a/eraser-base/src/main/jastadd/MachineLearning.jrag b/eraser-base/src/main/jastadd/MachineLearning.jrag
index f9c93d35bb011c113e6ce6847ab8395068bc5fce..339cbda94f53d5ea5808af34dfdb3ba79924193e 100644
--- a/eraser-base/src/main/jastadd/MachineLearning.jrag
+++ b/eraser-base/src/main/jastadd/MachineLearning.jrag
@@ -14,7 +14,7 @@ aspect MachineLearning {
   syn Leaf MachineLearningModel.classify();
 
   //--- currentActivityName ---
-  syn String Root.currentActivityName() = de.tudresden.inf.st.eraser.util.JavaUtils.ifPresentOrElseReturn(
+  syn String Root.currentActivityName() = JavaUtils.ifPresentOrElseReturn(
       currentActivity(),
       Activity::getLabel,
       () -> "no activity"
@@ -64,7 +64,7 @@ aspect MachineLearning {
   public void DummyMachineLearningModel.connectItems(List<String> itemNames) {
     logger.info("Storing items to connect");
     for (String itemName : itemNames) {
-      de.tudresden.inf.st.eraser.util.JavaUtils.ifPresentOrElse(getRoot().resolveItem(itemName),
+      JavaUtils.ifPresentOrElse(getRoot().resolveItem(itemName),
           this::addItem,
           () -> logger.warn("Could not resolve item '{}'", itemName));
     }
@@ -107,7 +107,7 @@ aspect MachineLearning {
   //--- ActivityItem ---
   @Override
   public double ActivityItem.getState() {
-    return de.tudresden.inf.st.eraser.util.JavaUtils.ifPresentOrElseReturn(
+    return JavaUtils.ifPresentOrElseReturn(
       getRoot().currentActivity(),
       activity -> (double) activity.getIdentifier(),
       () -> super.getState()
diff --git a/eraser-base/src/main/jastadd/MachineLearning.relast b/eraser-base/src/main/jastadd/MachineLearning.relast
index 6ae0837691870e12b5578ab21b417144859f0096..4f00aa9c0e67911fd8b485973b1fe3b3465e5374 100644
--- a/eraser-base/src/main/jastadd/MachineLearning.relast
+++ b/eraser-base/src/main/jastadd/MachineLearning.relast
@@ -49,6 +49,7 @@ InputNeuron : Neuron ;
 rel InputNeuron.Item -> Item ;
 
 HiddenNeuron : Neuron ::= <ActivationFormula:DoubleArrayDoubleFunction> ;
+BiasNeuron : HiddenNeuron ;
 OutputNeuron : HiddenNeuron ::= <Label:String> ;
 
 DummyMachineLearningModel : MachineLearningModel ::= Current:DecisionTreeLeaf ;
diff --git a/eraser-base/src/main/jastadd/Navigation.jrag b/eraser-base/src/main/jastadd/Navigation.jrag
index bb57ce614c40c68bf1afc4bc53680d3c9a3a8884..9aadd37e8d5ed064a1b28ede6244f5746030a652 100644
--- a/eraser-base/src/main/jastadd/Navigation.jrag
+++ b/eraser-base/src/main/jastadd/Navigation.jrag
@@ -116,10 +116,6 @@ aspect Navigation {
     return java.util.Optional.empty();
   }
 
-  //--- containingChannel ---
-  inh Channel Link.containingChannel();
-  eq Channel.getLink().containingChannel() = this;
-
   //--- containingThing ---
   inh Thing Channel.containingThing();
   eq Thing.getChannel().containingThing() = this;
@@ -128,6 +124,16 @@ aspect Navigation {
   inh NeuralNetworkRoot OutputLayer.containingNeuralNetwork();
   eq NeuralNetworkRoot.getOutputLayer().containingNeuralNetwork() = this;
 
+  //--- linkedThing ---
+  syn Optional<Thing> Item.linkedThing() {
+    if (!this.hasChannel()) {
+      return Optional.empty();
+    }
+    Channel channel = this.getChannel();
+    Thing thing = channel.containingThing();
+    return Optional.of(thing);
+  }
+
   //--- getRoot ---
   inh Root ASTNode.getRoot();
   eq Root.getChannelCategory().getRoot() = this;
diff --git a/eraser-base/src/main/jastadd/NeuralNetwork.jrag b/eraser-base/src/main/jastadd/NeuralNetwork.jrag
index a5d04cee57e017d037d3defb882dbf0560f1c0c1..d2953b92ad8ca32de17d7f1240b85d6a170c5687 100644
--- a/eraser-base/src/main/jastadd/NeuralNetwork.jrag
+++ b/eraser-base/src/main/jastadd/NeuralNetwork.jrag
@@ -57,7 +57,7 @@ aspect NeuralNetwork {
   //--- value ---
   syn double Neuron.value();
 
-  syn double HiddenNeuron.value() {
+  eq HiddenNeuron.value() {
     double[] inputs = new double[getInputs().size()];
     for (int i=0; i<inputs.length; ++i) {
       NeuronConnection connection = getInputList().get(i);
@@ -67,9 +67,11 @@ aspect NeuralNetwork {
     double result = getActivationFormula().apply(inputs);
 //    logger.debug("{}: {} -> {}", this, java.util.Arrays.toString(inputs), result);
     return result;
-    }
+  }
+
+  eq BiasNeuron.value() = 1;
 
-  syn double InputNeuron.value() {
+  eq InputNeuron.value() {
     return getItem().getStateAsDouble();
   }
 
@@ -98,7 +100,7 @@ aspect NeuralNetwork {
       }
       String itemName = itemNames.get(i);
       InputNeuron neuron = getInputNeuron(i);
-      de.tudresden.inf.st.eraser.util.JavaUtils.ifPresentOrElse(getRoot().resolveItem(itemName),
+      JavaUtils.ifPresentOrElse(getRoot().resolveItem(itemName),
           neuron::setItem,
           () -> logger.warn("Could not resolve item '{}'", itemName));
     }
@@ -165,6 +167,12 @@ aspect NeuralNetwork {
     return good;
   }
 
+  @Override
+  public boolean BiasNeuron.check() {
+    setActivationFormula(inputs -> 1.0);
+    return super.check();
+  }
+
   //--- mlKind ---
   inh String OutputLayer.mlKind();
   inh String Neuron.mlKind();
diff --git a/eraser-base/src/main/jastadd/Printing.jrag b/eraser-base/src/main/jastadd/Printing.jrag
index 3c463ac7c2a1964aa835cdb0404e87b2f61c1a9d..3bfa5280da744f15b42f0371ac3323aaac1b9623 100644
--- a/eraser-base/src/main/jastadd/Printing.jrag
+++ b/eraser-base/src/main/jastadd/Printing.jrag
@@ -160,7 +160,7 @@ aspect Printing {
     return new MemberPrinter("Channel")
         .addRequired("id", getID())
         .addRequired("type", getType(), ChannelType::getID)
-        .addIds("links", getNumLink(), getLinkList(), Link::getItem)
+        .addIds("links", getLinkedItems())
         .build();
   }
 
diff --git a/eraser-base/src/main/jastadd/main.relast b/eraser-base/src/main/jastadd/main.relast
index f9601870ae5ee144306040e355a5bdaf4017fd72..45dd621af675e7233511a2179baeb2fa54c29b25 100644
--- a/eraser-base/src/main/jastadd/main.relast
+++ b/eraser-base/src/main/jastadd/main.relast
@@ -1,6 +1,6 @@
 // ----------------    Main    ------------------------------
 Root ::= Thing* Group* ThingType* ChannelType* ChannelCategory* ItemCategory* User* MqttRoot InfluxRoot
-         MachineLearningRoot Rule* ;
+         MachineLearningRoot Rule* Location* ;
 
 // ----------------    Users   ------------------------------
 User : LabelledModelElement ;
diff --git a/eraser-base/src/main/jastadd/openhab.relast b/eraser-base/src/main/jastadd/openhab.relast
index e26d7b00b6cac0a01627cab3eb2f4ecbde9767ff..6f5b378edb8a57a61d07208e682440f8dcd5847c 100644
--- a/eraser-base/src/main/jastadd/openhab.relast
+++ b/eraser-base/src/main/jastadd/openhab.relast
@@ -16,10 +16,9 @@ abstract ChannelCategory ;
 DefaultChannelCategory : ChannelCategory ::= <Value:DefaultChannelCategoryValue> ;
 SimpleChannelCategory : ChannelCategory ::= <Value:String> ;
 
-Channel : ModelElement ::= Link* ;
+Channel : ModelElement ::= ;
 rel Channel.Type -> ChannelType ;
-
-Link ::= <Item:Item> ;
+rel Channel.LinkedItem* <-> Item.Channel? ;
 
 Parameter : DescribableModelElement ::= <Type:ParameterValueType> [DefaultValue:ParameterDefaultValue] <Context:String> <Required:boolean> ;
 ParameterDefaultValue ::= <Value:String> ;
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java
index 627167f83a7773e2923dc31508904405042bc25b..19f0dff885570597ecf0f05cf155d58f44d0d67c 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java
@@ -311,10 +311,8 @@ public class OpenHab2Importer {
 
   private void update(Root model, LinkData[] linkList) {
     for (LinkData linkData : linkList) {
-      Link link = new Link();
       ifPresent(model.resolveChannel(linkData.channelUID), "Channel", linkData,
-          channel -> channel.addLink(link));
-      ifPresent(model.resolveItem(linkData.itemName), "Item", linkData, link::setItem);
+          channel -> ifPresent(model.resolveItem(linkData.itemName), "Item", linkData, channel::addLinkedItem));
     }
   }
 
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java
index 2aa34c770542a34292550b1e046ba34d3c3cd7c0..ea392f2a9cd747b9f06a355e9648bc7d2a5b642d 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java
@@ -73,7 +73,7 @@ public class EraserParserHelper {
     this.root.getMqttRoot().ensureCorrectPrefixes();
 
     resolveList(channelMap, missingChannelListMap, Thing::addChannel);
-    resolveList(itemMap, missingItemLinkListMap, (channel, item) -> channel.addLink(new Link(item)));
+    resolveList(itemMap, missingItemLinkListMap, Channel::addLinkedItem);
     resolveList(groupMap, missingSubGroupListMap, Group::addGroup);
     resolveList(itemMap, missingItemListMap, this::addItemToGroup);
     resolveList(channelTypeMap, missingChannelTypeListMap, ThingType::addChannelType);
diff --git a/eraser-base/src/test/resources/openhabtest/oh2/links.json b/eraser-base/src/test/resources/openhabtest/oh2/links.json
index 79e79442806b87513d8581eb877453873069ddbe..f881db063fd53906ab14e3fda79dbf4a5fac0d4b 100644
--- a/eraser-base/src/test/resources/openhabtest/oh2/links.json
+++ b/eraser-base/src/test/resources/openhabtest/oh2/links.json
@@ -142,7 +142,7 @@
   {
     "channelUID": "openlicht:polar-m600:342dfc32:rotation-y",
     "configuration": {},
-    "itemName": "watch_acceleration_y"
+    "itemName": "watch_rotation_y"
   },
   {
     "channelUID": "openlicht:polar-m600:342dfc32:rotation-z",
diff --git a/eraser-base/src/test/resources/openhabtest/oh2/output.eraser b/eraser-base/src/test/resources/openhabtest/oh2/output.eraser
index c5e139935809383792596f6183ffe31514c8dbd0..45f5a63949ad5615ae7ae2bc11b8a7db3e384b1c 100644
--- a/eraser-base/src/test/resources/openhabtest/oh2/output.eraser
+++ b/eraser-base/src/test/resources/openhabtest/oh2/output.eraser
@@ -77,7 +77,7 @@ Channel: id="openlicht:polar-m600:342dfc32:activity" type="openlicht:activity-ty
 Channel: id="openlicht:polar-m600:342dfc32:brightness" type="openlicht:brightness-type" links=["polar_brightness"] ;
 Channel: id="openlicht:polar-m600:342dfc32:heart-rate" type="openlicht:heart-rate-type" ;
 Channel: id="openlicht:polar-m600:342dfc32:rotation-x" type="openlicht:rotation-type" links=["watch_rotation_x"] ;
-Channel: id="openlicht:polar-m600:342dfc32:rotation-y" type="openlicht:rotation-type" links=["watch_acceleration_y"] ;
+Channel: id="openlicht:polar-m600:342dfc32:rotation-y" type="openlicht:rotation-type" links=["watch_rotation_y"] ;
 Channel: id="openlicht:polar-m600:342dfc32:rotation-z" type="openlicht:rotation-type" links=["watch_rotation_z"] ;
 Channel: id="openlicht:polar-m600:342dfc32:steps" type="openlicht:steps-type" ;
 Channel: id="openlicht:samsung-s6:2ca84896:brightness" type="openlicht:brightness-type" links=["samsung_brightness"] ;
diff --git a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java
index b46e2325fa7aafd263a04e0098761c2d0bd61673..b207dc5fb16ff63b5601b3bab92a1b33b29c1241 100644
--- a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java
+++ b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java
@@ -10,6 +10,7 @@ import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Plan;
 import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
 import de.tudresden.inf.st.eraser.feedbackloop.execute.ExecuteImpl;
+import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerHelper;
 import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerImpl;
 import de.tudresden.inf.st.eraser.feedbackloop.plan.PlanImpl;
 import de.tudresden.inf.st.eraser.jastadd.model.DummyMachineLearningModel;
@@ -30,8 +31,10 @@ import org.apache.logging.log4j.Logger;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Collections;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.Lock;
@@ -127,17 +130,33 @@ public class EraserStarter {
       logger.info("Reading preference learning from file {}", settings.preference.file);
       Learner learner = new LearnerImpl();
       // there should be a method to load a model using an URL
-      Model preference = learner.getTrainedModel(settings.preference.realURL(), settings.preference.id);
-      NeuralNetworkRoot neuralNetwork = LearnerHelper.transform(preference);
-      if (neuralNetwork == null) {
-        logger.error("Could not create preference model, see possible previous errors.");
+      boolean loadingSuccessful = false;
+      try (InputStream input = settings.preference.realURL().openStream()) {
+        loadingSuccessful = learner.loadModelFromFile(input, settings.preference.id,
+            Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
+      } catch (IOException e) {
+        logger.catching(e);
+        loadingSuccessful = false;
+      }
+//      Model preference = learner.getTrainedModel(settings.preference.realURL(), settings.preference.id);
+      logger.debug("Loading of {} was {}", settings.preference.realURL(), loadingSuccessful ? "successful" : "failed");
+      if (loadingSuccessful) {
+        Model preference = learner.getTrainedModel(settings.preference.id);
+        NeuralNetworkRoot neuralNetwork = LearnerHelper.transform(preference);
+        if (neuralNetwork == null) {
+          logger.error("Could not create preference model, see possible previous errors.");
+        } else {
+          model.getMachineLearningRoot().setPreferenceLearning(neuralNetwork);
+          neuralNetwork.setOutputApplication(zeroToThree -> 33 * zeroToThree);
+          JavaUtils.ifPresentOrElse(
+              model.resolveItem(settings.preference.affectedItem),
+              item -> neuralNetwork.getOutputLayer().setAffectedItem(item),
+              () -> logger.error("Output item not set from value '{}'", settings.preference.affectedItem));
+        }
       } else {
-        model.getMachineLearningRoot().setPreferenceLearning(neuralNetwork);
-        neuralNetwork.setOutputApplication(zeroToThree -> 33 * zeroToThree);
-        JavaUtils.ifPresentOrElse(
-            model.resolveItem(settings.preference.affectedItem),
-            item -> neuralNetwork.getOutputLayer().setAffectedItem(item),
-            () -> logger.error("Output item not set from value '{}'", settings.preference.affectedItem));
+        // loading was not successful
+        logger.warn("Falling back to dummy preference learning");
+        model.getMachineLearningRoot().setPreferenceLearning(DummyMachineLearningModel.createDefault());
       }
     }
     model.getMachineLearningRoot().getPreferenceLearning().connectItems(settings.preference.items);
diff --git a/eraser.starter/starter-setting.yaml b/eraser.starter/starter-setting.yaml
index b452c7cd017656e9c2890f99946060d713411990..f29763c43dfa4b789f872efc9eec88439930eff9 100644
--- a/eraser.starter/starter-setting.yaml
+++ b/eraser.starter/starter-setting.yaml
@@ -36,7 +36,7 @@ preference:
   # File to read in. Expected format = eg
   file: preference.eg
   # Use dummy model in which the current activity is directly editable. Default: false.
-  dummy: true
+  dummy: false
   # Model id. Default: 1.
   id: 1
   # Items to connect to inputs
@@ -45,8 +45,7 @@ preference:
     - datetime_day
     - datetime_hour
     - datetime_minute
-    - bias
-    - activity
+#    - activity
   # Item to change with classification result
   affectedItem: iris1_item
 
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java
index a5b28e14cc0b6200f11cb2da18a6bb3595c31d2b..16bcc040a22ee9b783efb8c7d474bd6b9926d943 100644
--- a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java
@@ -1,5 +1,7 @@
 package de.tudresden.inf.st.eraser.feedbackloop.api;
 
+import java.io.File;
+import java.io.InputStream;
 import java.net.URL;
 import java.util.List;
 
@@ -30,12 +32,27 @@ public interface Learner {
 	 * @return true - data set loading was successful
 	 * */
 	boolean loadDataSet(String dataSetName, List<Integer> targetColumns, int modelID);
-	
-	 /**
-     * Method for loading a neural network from a file. 
+
+	/**
+	 * Method for loading a neural network from a file.
+	 * Please note that the normalizers are not loaded from the file, because it is assumed that the mins and maxes are saved in the metadata of the data sets or items.
+	 *
+	 * @param file       file to load the model from
+	 * @param modelID - ID of the BasicNetwork.
+	 * @param inputMaxes - list that contains max values of all input columns (sensors) e.g. light intensity 100
+	 * @param inputMins - list that contains min values of all input columns (sensors) e.g. light intensity 0
+	 * @param targetMaxes - list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+	 * @param targetMins - list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+	 * @return true - model loading was successful
+	 * */
+	boolean loadModelFromFile(File file, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+	                          List<Integer> targetMins);
+
+	/**
+     * Method for loading a neural network from an input stream.
      * Please note that the normalizer are note loaded file , because it is assumed that the mins and maxes are saved anyway in the meta data of the data sets or items.
-     * 
-     * @param path - path to the save folder of the model files e.g. C:\models\
+     *
+     * @param input - stream to load the model from
      * @param modelID - ID of the BasicNetwork.
      * @param inputMaxes - list that contains max values of all input columns (sensors) e.g. light intensity 100
      * @param inputMins - list that contains min values of all input columns (sensors) e.g. light intensity 0
@@ -43,8 +60,8 @@ public interface Learner {
      * @param targetMins - list that contains min values of all output columns (results) e.g. brigthness 0 for preference learning 
      * @return true - model loading was successful
      * */
-	boolean loadModelFromFile(String path, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
-            List<Integer> targetMins);
+	boolean loadModelFromFile(InputStream input, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+	                          List<Integer> targetMins);
 	
 	/**
 	 * Method for the initial training of algorithms and models. That uses external data set for training.
@@ -102,6 +119,7 @@ public interface Learner {
 	 * */
 	Model getTrainedModel(int modelID);
 
+	@Deprecated
 	Model getTrainedModel(URL url, int modelID);
 	
 	/**
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningDecoder.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningDecoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..2c29aa28b92a5773167a98fa7bf3e40a8a538c95
--- /dev/null
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningDecoder.java
@@ -0,0 +1,28 @@
+package de.tudresden.inf.st.eraser.feedbackloop.api;
+
+import java.time.Instant;
+
+/**
+ * This interface represents the connection from a machine learning model back to the knowledge base.
+ * It decodes the output of the machine learning model and outputs the result of the classification.
+ *
+ * @author rschoene - Initial contribution
+ */
+@SuppressWarnings("unused")
+public interface MachineLearningDecoder {
+
+  /**
+   * Executes the machine learning model and returns the classification result.
+   * @return the result of the classification
+   */
+  MachineLearningResult classify();
+
+  // less important
+
+  /**
+   * Returns the time when the model was last updated, i.e., when the last training was completed.
+   * @return the time when the model was last updated, or <code>null</code> if the model was not trained yet
+   */
+  Instant lastModelUpdate();
+
+}
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningEncoder.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningEncoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..dff85f27acd5fd493f57c9aea35dd7cfd06a1e6c
--- /dev/null
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningEncoder.java
@@ -0,0 +1,48 @@
+package de.tudresden.inf.st.eraser.feedbackloop.api;
+
+import de.tudresden.inf.st.eraser.jastadd.model.Item;
+import de.tudresden.inf.st.eraser.jastadd.model.Root;
+
+import java.util.List;
+
+/**
+ * This interface represents the connection from knowledge base to one machine learning model.
+ * It takes information from the knowledge base, and encodes them to a representation that is readable both for
+ * the used technique and the purpose of the machine learning model.
+ *
+ * @author rschoene - Initial contribution
+ */
+@SuppressWarnings("unused")
+public interface MachineLearningEncoder {
+
+  /**
+   * Update when new data is available.
+   * @param model        The underlying model
+   * @param changedItems A list of items whose state has changed
+   */
+  void newData(Root model, List<Item> changedItems);
+
+  // to be discussed, in which form this is specified
+
+  /**
+   * Get the items that this model is supposed to change.
+   * @return the list of targeted items
+   */
+  List<Item> getTargets();
+
+  // to be discussed, in which form this is specified
+
+  /**
+   * Get the items which are relevant for the decision making of this model.
+   * @return the list of items relevant for decision making
+   */
+  List<Item> getRelevantItems();
+
+  // to be discussed, if this is necessary
+
+  /**
+   * Explicit hint for this model to start/trigger training. The model might ignore this hint.
+   */
+  void triggerTraining();
+
+}
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningResult.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningResult.java
new file mode 100644
index 0000000000000000000000000000000000000000..d57ccc815c6b463f84f7fc417b7da1b97a635eb2
--- /dev/null
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/MachineLearningResult.java
@@ -0,0 +1,25 @@
+package de.tudresden.inf.st.eraser.feedbackloop.api;
+
+import de.tudresden.inf.st.eraser.jastadd.model.ItemPreference;
+
+import java.util.List;
+
+/**
+ * Representation of a classification result using a MachineLearningModel.
+ *
+ * @author rschoene - Initial contribution
+ */
+@SuppressWarnings("unused")
+public interface MachineLearningResult {
+
+  // Object rawClass();
+
+  // double rawConfidence();
+
+  // can be used for both activity and preferences
+  /**
+   * Get the result as a list of item preferences, i.e., new states to be set for those items.
+   * @return the classification result as item preferences
+   */
+  List<ItemPreference> getPreferences();
+}
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java
index e9303a004d458f1eb84c2b8aa76043db35226af2..d3951ee8113ec4e5cbf1f22be493aef90a951ac9 100644
--- a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java
@@ -23,8 +23,8 @@ public class Model {
 	 * todo
 	 */
 	private String modelType;
-	private ArrayList<Double> weights;
-	private ArrayList<Layer> layers;
+	private List<Double> weights;
+	private List<Layer> layers;
 
 	public Model(String model) {
 		modelType = model;
diff --git a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/LearnerHelper.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerHelper.java
similarity index 68%
rename from eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/LearnerHelper.java
rename to feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerHelper.java
index 17ccf418c04abc53218e09be7bdd7385a836d024..c33928e0820cbab26f6a87b15fde5fa69f3b9a5b 100644
--- a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/LearnerHelper.java
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerHelper.java
@@ -1,4 +1,4 @@
-package de.tudresden.inf.st.eraser.starter;
+package de.tudresden.inf.st.eraser.feedbackloop.learner;
 
 import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
 import de.tudresden.inf.st.eraser.jastadd.model.*;
@@ -14,13 +14,14 @@ import org.encog.neural.networks.layers.Layer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.stream.Collectors;
 
 /**
- * Transformation of a {@link Model} into a {@link MachineLearningModel}.
+ * Transformation of a {@link Model} into a {@link NeuralNetworkRoot}.
  *
  * @author rschoene - Initial contribution
  */
-class LearnerHelper {
+public class LearnerHelper {
 
   private static final Logger logger = LogManager.getLogger(LearnerHelper.class);
 
@@ -29,44 +30,45 @@ class LearnerHelper {
   private static DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
   private static DoubleArrayDoubleFunction function_one = inputs -> 1.0;
 
-  static NeuralNetworkRoot transform(Model model) {
+  public static NeuralNetworkRoot transform(Model model) {
     NeuralNetworkRoot result = NeuralNetworkRoot.createEmpty();
-    ArrayList<Double> weights = model.getWeights();
+    List<Double> weights = model.getWeights();
+    logger.debug("Got {} weights", weights.size());
 
     List<List<Neuron>> allNeurons = new ArrayList<>();
     // inputs
     Layer inputLayer = model.getInputLayer();
+    reportLayer("input", inputLayer);
     List<Neuron> inputNeurons = new ArrayList<>();
-    for (int i = 0; i < inputLayer.getNeuronCount(); ++i) {
+    for (int i = 0; i < nonBiasNeuronCount(inputLayer); ++i) {
       InputNeuron inputNeuron = new InputNeuron();
       result.addInputNeuron(inputNeuron);
       inputNeurons.add(inputNeuron);
     }
-    InputNeuron bias = null;
-    if (inputLayer.hasBias()) {
-      bias = new InputNeuron();
-      result.addInputNeuron(bias);
-      inputNeurons.add(bias);
-    }
+    addBiasIfNeeded(inputLayer, result.getHiddenNeuronList(), inputNeurons);
     allNeurons.add(inputNeurons);
 
     // hidden layer
     List<Neuron> currentNeurons;
     for (Layer hiddenLayer : model.getHiddenLayers()) {
+      reportLayer("one hidden", hiddenLayer);
       currentNeurons = new ArrayList<>();
       allNeurons.add(currentNeurons);
-      for (int i = 0; i < hiddenLayer.getNeuronCount(); ++i) {
+      for (int i = 0; i < nonBiasNeuronCount(hiddenLayer); ++i) {
         HiddenNeuron hiddenNeuron = new HiddenNeuron();
         setActivationFunction(hiddenNeuron, hiddenLayer.getActivationFunction());
+        result.addHiddenNeuron(hiddenNeuron);
         currentNeurons.add(hiddenNeuron);
       }
+      addBiasIfNeeded(hiddenLayer, result.getHiddenNeuronList(), currentNeurons);
     }
 
     // output layer
     OutputLayer outputLayer = new OutputLayer();
     Layer modelOutputLayer = model.getOutputLayer();
+    reportLayer("output", modelOutputLayer);
     List<Neuron> outputNeurons = new ArrayList<>();
-    for (int i = 0; i < modelOutputLayer.getNeuronCount(); ++i) {
+    for (int i = 0; i < nonBiasNeuronCount(modelOutputLayer); ++i) {
       OutputNeuron outputNeuron = new OutputNeuron();
       setActivationFunction(outputNeuron, modelOutputLayer.getActivationFunction());
       outputLayer.addOutputNeuron(outputNeuron);
@@ -74,6 +76,10 @@ class LearnerHelper {
     }
     result.setOutputLayer(outputLayer);
     allNeurons.add(outputNeurons);
+    logger.debug("Created a total of {} neurons",
+        allNeurons.stream()
+            .map(list -> Integer.toString(list.size()))
+            .collect(Collectors.joining("+")));
 
     // set weights from back to front, and from top to bottom
     int weightIndex = 0;
@@ -82,10 +88,16 @@ class LearnerHelper {
       List<Neuron> leftList = allNeurons.get(layer - 1);
       for (int rightIndex = 0; rightIndex < rightList.size(); rightIndex++) {
         for (int leftIndex = 0; leftIndex < leftList.size(); leftIndex++) {
+          if (rightList.get(rightIndex) instanceof BiasNeuron) {
+            continue;
+          }
           leftList.get(leftIndex).connectTo(rightList.get(rightIndex), weights.get(weightIndex++));
         }
       }
     }
+    if (weightIndex != weights.size()) {
+      logger.error("No all weights used (only {} of {}). Loaded wrong model!", weightIndex, weights.size());
+    }
 
     outputLayer.setCombinator(LearnerHelper::predictor);
     logger.info("Created model with {} input, {} hidden and {} output neurons",
@@ -93,15 +105,23 @@ class LearnerHelper {
     return result;
   }
 
-  private static void setActivationFunction(HiddenNeuron neuron, String functionName) {
-    switch (functionName) {
-      case "ActivationTANH": neuron.setActivationFormula(tanh); break;
-      case "ActivationLinear": neuron.setActivationFormula(function_one);
-      case "ActivationSigmoid": neuron.setActivationFormula(sigmoid); break;
-      default: throw new IllegalArgumentException("Unknown function " + functionName);
+  private static void addBiasIfNeeded(Layer layer, JastAddList<HiddenNeuron> neuronList, List<Neuron> localNeuronList) {
+    if (layer.hasBias()) {
+      BiasNeuron bias = new BiasNeuron();
+      neuronList.add(bias);
+      localNeuronList.add(bias);
     }
   }
 
+  private static int nonBiasNeuronCount(Layer layer) {
+    return layer.getNeuronCount() - (layer.hasBias() ? 1 : 0);
+  }
+
+  private static void reportLayer(String name, Layer layer) {
+    logger.debug("{} layer has {} neurons {}",
+        name, layer.getNeuronCount(), layer.hasBias() ? "(including bias)" : "");
+  }
+
   private static void setActivationFunction(HiddenNeuron neuron, ActivationFunction function) {
     if (function instanceof ActivationTANH) {
       neuron.setActivationFormula(tanh);
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java
index 1b66d19ef7eb3e3a7c2e96b8d99668dd4a41fa7c..8cd30d357c56ae197c125179f42033fc8b7e865c 100644
--- a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java
@@ -13,13 +13,12 @@ import org.encog.util.arrayutil.NormalizedField;
 import org.encog.util.csv.CSVFormat;
 import org.encog.util.csv.ReadCSV;
 
+import java.io.File;
+import java.io.InputStream;
 import java.net.URL;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 /**
  * Implementation of the Learner.
@@ -66,10 +65,18 @@ public class LearnerImpl implements Learner {
   }
 
   @Override
-  public boolean loadModelFromFile(String path, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+  public boolean loadModelFromFile(File file, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
                                    List<Integer> targetMins) {
-    logger.debug("Load model from file {}", path);
-    models.put(modelID, new Network(path, modelID, inputMaxes, inputMins, targetMaxes, targetMins));
+    logger.debug("Load model from file {}", file);
+    models.put(modelID, new Network(file.getAbsolutePath(), modelID, inputMaxes, inputMins, targetMaxes, targetMins));
+    return true;
+  }
+
+  @Override
+  public boolean loadModelFromFile(InputStream input, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                                   List<Integer> targetMins) {
+    logger.debug("Load model from input stream");
+    models.put(modelID, new Network(input, modelID, inputMaxes, inputMins, targetMaxes, targetMins));
     return true;
   }
 
@@ -186,15 +193,16 @@ public class LearnerImpl implements Learner {
 
     model.setWeights(weightsList);
 
-    ArrayList<Layer> layers = new ArrayList<>();
+    // do not use getLayers() because it is not restored immediately on load from file
     FlatNetwork flat = nn.getFlat();
-    // do not use get Layers because is not restored immediatly on load from file 
+    List<Layer> layers = new ArrayList<>(flat.getLayerCounts().length);
+    logger.debug("layer counts: {}", Arrays.toString(flat.getLayerCounts()));
     for (int j = 0; j < flat.getLayerCounts().length; j++) {
-      boolean hasBias = true;
-      if( j==0 || j==flat.getLayerCounts().length-1) hasBias = false;
+//      boolean hasBias = j != 0 && j != flat.getLayerCounts().length - 1;
+      boolean hasBias = flat.getLayerCounts()[j] != flat.getLayerFeedCounts()[j];
       Layer l = new BasicLayer(flat.getActivationFunctions()[j], hasBias, flat.getLayerCounts()[j]);
       l.setBiasActivation(flat.getBiasActivation()[j]);
-      layers.add(l);
+      layers.add(0, l);
     }
 
     model.setLayers(layers);
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java
index b0b9f41089c49e5a2d2f9349f5d6cab2439f8740..19765c1e6df76fb2e2701b3a5eebb2e302728b6e 100644
--- a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java
@@ -1,23 +1,19 @@
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
 
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.*;
+import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
 import de.tudresden.inf.st.eraser.jastadd.model.*;
 import org.apache.commons.math3.stat.StatUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.encog.ml.data.MLData;
-import org.encog.ml.data.versatile.NormalizationHelper;
 
-import java.nio.file.Path;
+import java.io.File;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
-import java.util.stream.Collectors;
 
 @SuppressWarnings("unused")
 public class Main {
@@ -65,10 +61,11 @@ public class Main {
 	private static void loadFromEncog() {
 		Learner learner = new LearnerImpl();
 		learner.loadModelFromFile(
-				InitialDataConfig.encog_filename, 1,
+				new File(InitialDataConfig.encog_filename), 1,
 				InitialDataConfig.inputMaxes, InitialDataConfig.inputMins,
 				InitialDataConfig.targetMaxes, InitialDataConfig.targetMins);
 		printModel(learner.getTrainedModel(1));
+		NeuralNetworkRoot eraserModel = LearnerHelper.transform(learner.getTrainedModel(1));
 	}
 
 	private static void printModel(Model model) {
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java
index c43bf549faf2d7430a5a869e0dcbf17aa70cc11d..e3467c7fda1544befee165d0316f11e9b35e8d27 100644
--- a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java
@@ -1,6 +1,8 @@
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
 
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -40,8 +42,8 @@ public class Network {
    * @param modelID           ID of the BasicNetwork.
    * @param inputMaxes        list that contains max values of all input columns (sensors) e.g. light intensity 100
    * @param inputMins         list that contains min values of all input columns (sensors) e.g. light intensity 0
-   * @param targetMaxes       list that contains max values of all output columns (results) e.g. brigthness 100 for preference learning
-   * @param targetMins        list that contains min values of all output columns (results) e.g. brigthness 0 for preference learning
+   * @param targetMaxes       list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+   * @param targetMins        list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
    */
   public Network(int inputCount, int outputCount, int hiddenCount, int hiddenNeuronCount, int modelID,
                  List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
@@ -83,22 +85,48 @@ public class Network {
    * @param modelID     ID of the BasicNetwork.
    * @param inputMaxes  list that contains max values of all input columns (sensors) e.g. light intensity 100
    * @param inputMins   list that contains min values of all input columns (sensors) e.g. light intensity 0
-   * @param targetMaxes list that contains max values of all output columns (results) e.g. brigthness 100 for preference learning
-   * @param targetMins  list that contains min values of all output columns (results) e.g. brigthness 0 for preference learning
+   * @param targetMaxes list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+   * @param targetMins  list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
    */
   public Network(String path, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
                  List<Integer> targetMins) {
+    this(() -> (BasicNetwork) EncogDirectoryPersistence.loadObject(new File(path, "NN_" + modelID)), modelID, inputMaxes, inputMins, targetMaxes, targetMins);
+  }
+
+  /**
+   * Constructor for when the neural network is loaded from an input stream.
+   * Please note that the normalizers are not loaded from the file, because it is assumed that the mins and maxes are saved anyway in the meta data of the data sets or items.
+   *
+   * @param input       stream to load the model from
+   * @param modelID     ID of the BasicNetwork.
+   * @param inputMaxes  list that contains max values of all input columns (sensors) e.g. light intensity 100
+   * @param inputMins   list that contains min values of all input columns (sensors) e.g. light intensity 0
+   * @param targetMaxes list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+   * @param targetMins  list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+   */
+  public Network(InputStream input, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                 List<Integer> targetMins) {
+    this(() -> (BasicNetwork) EncogDirectoryPersistence.loadObject(input), modelID, inputMaxes, inputMins, targetMaxes, targetMins);
+  }
+
+  private Network(LoadEncogModel loader, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                 List<Integer> targetMins) {
     this.modelID = modelID;
 
     normalizersIn = new ArrayList<>();
     normalizersTar = new ArrayList<>();
 
-    network = (BasicNetwork) EncogDirectoryPersistence.loadObject(new File(path + "NN_" + modelID));
+    network = loader.load();
 
     addNormalizer(inputMaxes, inputMins, normalizersIn);
     addNormalizer(targetMaxes, targetMins, normalizersTar);
   }
 
+  @FunctionalInterface
+  interface LoadEncogModel {
+    BasicNetwork load();
+  }
+
   /**
    * Method to save the trained {@link BasicNetwork} to a file.
    * File name is always NN_modelID