diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7e77a76053345e34f70811b6498a8758dbf7d305..918f65ab8164ff97f01d7e37dc1fa86b7bfd3766 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,15 +4,23 @@ stages:
 - report
 
 variables:
+  # Instruct Testcontainers to use the DinD Docker daemon.
+  DOCKER_HOST: "unix:///var/run/docker.sock"
+#  # Improve performance with overlayfs.
+#  DOCKER_DRIVER: overlay2
   GRADLE_OPTS: "-Dorg.gradle.daemon=false"
   TEST_REPORTS: "/builds/OpenLicht/eraser/eraser-base/build/reports/tests/test/"
+  TEST_LOG: "/builds/OpenLicht/eraser/eraser-base/logs/eraser-test.log"
   JACOCO_REPORT: "/builds/OpenLicht/eraser/eraser-base/build/reports/jacoco/test/jacocoTestReport.xml"
+  TESTCONTAINERS_RYUK_DISABLED: "true"
 
 before_script:
   - export GRADLE_USER_HOME=`pwd`/.gradle
 
 build:
-  image: gradle:jdk8
+  image: openjdk:8
+  tags:
+    - docker
   stage: build
   script:
     - ./gradlew --console=plain --build-cache assemble
@@ -24,7 +32,9 @@ build:
       - .gradle
 
 test:
-  image: gradle:jdk8
+  image: openjdk:8
+  tags:
+    - docker
   stage: test
   script:
     - ./gradlew --continue --console=plain check jacocoTestReport
@@ -37,11 +47,14 @@ test:
   artifacts:
     when: always
     paths:
+      - $TEST_LOG
       - $TEST_REPORTS
       - $JACOCO_REPORT
 
 coverage:
   image: python:3.7.1-alpine
+  tags:
+    - docker
   stage: report
   dependencies:
   - test
diff --git a/build.gradle b/build.gradle
index 7348f5134a49d4e0231a68894601c7c8684af1d6..60b72b700ec5d34db48006da121885e5adc6b4d4 100644
--- a/build.gradle
+++ b/build.gradle
@@ -31,4 +31,12 @@ subprojects {
 	artifacts {
 		testArtifacts testJar
 	}
+
+	dependencies {
+		compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.2'
+		compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.2'
+		testCompile group: 'junit', name: 'junit', version: '4.12'
+		testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
+	}
+
 }
diff --git a/commons.color/build.gradle b/commons.color/build.gradle
index 135a0194474bfeab48418cd7589d117a4f360805..b94df8ebb2332f7ee82611202e287541a285d86b 100644
--- a/commons.color/build.gradle
+++ b/commons.color/build.gradle
@@ -7,12 +7,7 @@ apply plugin: 'java'
 sourceCompatibility = 1.8
 
 dependencies {
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    compile 'org.apache.commons:commons-math3:3.6.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
+    compile group: 'org.apache.commons', name: 'commons-math3', version: '3.6.1'
 }
 
 sourceSets {
diff --git a/eraser-base/build.gradle b/eraser-base/build.gradle
index 3a10d245278fb767c332deca63785bfb0c60a270..78903e66d8d681ff431c671c6e17c75145bfdce4 100644
--- a/eraser-base/build.gradle
+++ b/eraser-base/build.gradle
@@ -5,23 +5,24 @@ repositories {
 apply plugin: 'jastadd'
 apply plugin: 'application'
 apply plugin: 'jacoco'
+apply plugin: 'idea'
+apply plugin: 'distribution'
 
 dependencies {
     compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
     compile group: 'net.sf.beaver', name: 'beaver-rt', version: '0.9.11'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    compile 'org.fusesource.mqtt-client:mqtt-client:1.14'
-    compile 'org.apache.commons:commons-math3:3.6.1'
-    compile 'org.influxdb:influxdb-java:2.14'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
+    compile group: 'org.fusesource.mqtt-client', name: 'mqtt-client', version: '1.15'
+    compile group: 'org.influxdb', name: 'influxdb-java', version: '2.15'
+    testCompile group: 'org.testcontainers', name: 'testcontainers', version: '1.11.2'
+    testCompile group: 'org.testcontainers', name: 'influxdb', version: '1.11.2'
+    testCompile group: 'org.apache.logging.log4j', name: 'log4j-slf4j-impl', version: '2.11.2'
 }
 
 buildscript {
     repositories.mavenLocal()
     repositories.mavenCentral()
     dependencies {
-        classpath 'org.jastadd:jastaddgradle:1.13.2'
+        classpath group: 'org.jastadd', name: 'jastaddgradle', version: '1.13.2'
     }
 }
 
@@ -47,18 +48,22 @@ jacocoTestReport {
     }
 }
 
+def relastFiles =
+        fileTree('src/main/jastadd/') { include '**/*.relast' }.toList().toArray()
+String[] relastArguments = [
+        "libs/relast.jar",
+        "--grammarName=./src/main/jastadd/mainGen",
+        "--useJastAddNames",
+        "--listClass=RefList",
+        "--jastAddList=JastAddList",
+        "--file"
+]
 task preprocess(type: JavaExec) {
     group = 'Build'
     main = "-jar"
-    args = [
-            "libs/relast-compiler.jar",
-            "./src/main/jastadd/main.relast",
-            "--listClass=RefList",
-            "--jastAddList=JastAddList",
-            "--file"
-    ]
-
-    inputs.files file("./src/main/jastadd/main.relast")
+    args relastArguments + relastFiles
+
+    inputs.files relastFiles
     outputs.files file("./src/main/jastadd/mainGen.ast"), file("./src/main/jastadd/mainGen.jadd")
 }
 
@@ -79,6 +84,12 @@ jastadd {
     parser.genDir = "src/gen/java/de/tudresden/inf/st/eraser/jastadd/parser"
 }
 
+idea {
+    module {
+        generatedSourceDirs += file('src/gen/java')
+    }
+}
+
 sourceSets.main {
     java {
         srcDir 'src/gen/java'
diff --git a/eraser-base/libs/relast-compiler.jar b/eraser-base/libs/relast-compiler.jar
deleted file mode 100644
index a1a9b656a4eb4f990decbcb8a522b063332133bf..0000000000000000000000000000000000000000
Binary files a/eraser-base/libs/relast-compiler.jar and /dev/null differ
diff --git a/eraser-base/libs/relast.jar b/eraser-base/libs/relast.jar
new file mode 100644
index 0000000000000000000000000000000000000000..9b52866a399cfdd0c4fe29c102125beab5d77989
Binary files /dev/null and b/eraser-base/libs/relast.jar differ
diff --git a/eraser-base/src/main/jastadd/AdditionalTypes.jadd b/eraser-base/src/main/jastadd/AdditionalTypes.jadd
index bd81b87f0419f275d10c95a09a9c4205999e47bc..32651117408970af5b3a57f4ddc48e2f7f2a0d8c 100644
--- a/eraser-base/src/main/jastadd/AdditionalTypes.jadd
+++ b/eraser-base/src/main/jastadd/AdditionalTypes.jadd
@@ -11,15 +11,19 @@ aspect AdditionalTypes {
     }
   }
 
-  public class StringMap extends beaver.Symbol implements Iterable<AbstractMap.SimpleEntry<String, String>> {
-    private java.util.Deque<AbstractMap.SimpleEntry<String, String>> delegatee = new java.util.ArrayDeque<>();
+  public class TypedKeyMap<T> extends beaver.Symbol implements Iterable<AbstractMap.SimpleEntry<T, String>> {
+    private java.util.Deque<AbstractMap.SimpleEntry<T, String>> delegatee = new java.util.ArrayDeque<>();
 
-    public java.util.Iterator<AbstractMap.SimpleEntry<String, String>> iterator() {
+    public java.util.Iterator<AbstractMap.SimpleEntry<T, String>> iterator() {
       return delegatee.descendingIterator();
     }
 
-    public void put(String key, String value) {
+    public void put(T key, String value) {
       delegatee.add(new AbstractMap.SimpleEntry<>(key, value));
     }
   }
+
+  public class StringKeyMap extends TypedKeyMap<String> { }
+
+  public class IntegerKeyMap extends TypedKeyMap<Integer> { }
 }
diff --git a/eraser-base/src/main/jastadd/DecisionTree.jrag b/eraser-base/src/main/jastadd/DecisionTree.jrag
index 84894e304b8573a1997018d7977fca8aba55ce7e..f50ca0562a920844e46a838c0f655f97bf804a01 100644
--- a/eraser-base/src/main/jastadd/DecisionTree.jrag
+++ b/eraser-base/src/main/jastadd/DecisionTree.jrag
@@ -4,20 +4,20 @@ aspect DecisionTree {
   public class DecisionTreeLeaf implements Leaf { }
 
   //--- classify ---
-  syn Leaf DecisionTreeRoot.classify() {
+  syn DecisionTreeLeaf DecisionTreeRoot.classify() {
     return getRootRule().classify();
   }
 
-  syn Leaf DecisionTreeElement.classify();
+  syn DecisionTreeLeaf DecisionTreeElement.classify();
 
-  syn Leaf DecisionTreeRule.classify();
+  syn DecisionTreeLeaf DecisionTreeRule.classify();
 
-  syn Leaf ItemStateCheckRule.classify() {
+  syn DecisionTreeLeaf ItemStateCheckRule.classify() {
     boolean chooseLeft = getItemStateCheck().holds();
     return (chooseLeft ? getLeft() : getRight()).classify();
   }
 
-  syn Leaf DecisionTreeLeaf.classify() = this;
+  syn DecisionTreeLeaf DecisionTreeLeaf.classify() = this;
 
   //--- holds ---
   syn boolean ItemStateCheck.holds() = holdsFor(getItem());
@@ -46,6 +46,27 @@ aspect DecisionTree {
     return false;
   }
 
+  //--- computePreferences ---
+  syn List<ItemPreference> DecisionTreeLeaf.computePreferences() {
+    // iterate over the preferences of this leaf and all of its ancestors
+    List<ItemPreference> result = new ArrayList<>();
+    Set<Item> seenItems = new HashSet<>();
+    List<DecisionTreeElement> ancestors = ancestors();
+    for (ItemPreference pref : getPreferenceList()) {
+      result.add(pref);
+      seenItems.add(pref.getItem());
+    }
+    for (DecisionTreeElement ancestor : ancestors) {
+      for (ItemPreference pref : ancestor.getPreferenceList()) {
+        if (!seenItems.contains(pref.getItem())) {
+          result.add(pref);
+          seenItems.add(pref.getItem());
+        }
+      }
+    }
+    return result;
+  }
+
   //--- ancestors ---
   inh List<DecisionTreeElement> DecisionTreeElement.ancestors();
   eq DecisionTreeRule.getLeft().ancestors() {
@@ -59,6 +80,7 @@ aspect DecisionTree {
     return result;
   }
   eq DecisionTreeRoot.getRootRule().ancestors() = new ArrayList();
+  eq DummyMachineLearningModel.getCurrent().ancestors() = Collections.emptyList();
 
   public void DecisionTreeRoot.connectItems(List<String> itemNames) {
     // TODO walk through the tree using depth-first-search
diff --git a/eraser-base/src/main/jastadd/DecisionTree.relast b/eraser-base/src/main/jastadd/DecisionTree.relast
new file mode 100644
index 0000000000000000000000000000000000000000..17a1e65c1c6cd12ba2d49eeceba1eb920dae31bd
--- /dev/null
+++ b/eraser-base/src/main/jastadd/DecisionTree.relast
@@ -0,0 +1,12 @@
+// ----------------    Decision Tree    ------------------------------
+DecisionTreeRoot : InternalMachineLearningModel ::= RootRule:DecisionTreeRule ;
+abstract DecisionTreeElement ::= Preference:ItemPreference*;
+abstract DecisionTreeRule : DecisionTreeElement ::= Left:DecisionTreeElement Right:DecisionTreeElement <Label:String> ;
+ItemStateCheckRule : DecisionTreeRule ::= ItemStateCheck ;
+
+abstract ItemStateCheck ::= <Comparator:ComparatorType> ;
+rel ItemStateCheck.Item -> Item ;
+
+ItemStateNumberCheck : ItemStateCheck ::= <Value:double> ;
+ItemStateStringCheck : ItemStateCheck ::= <Value:String> ;
+DecisionTreeLeaf : DecisionTreeElement ::= <ActivityIdentifier:int> <Label:String> ;
diff --git a/eraser-base/src/main/jastadd/Imports.jadd b/eraser-base/src/main/jastadd/Imports.jadd
index 1c1a2e648f78bb681a54f23efa80e5febb59f013..319b024d1698c7efaabc89feed700f98fa24fa9c 100644
--- a/eraser-base/src/main/jastadd/Imports.jadd
+++ b/eraser-base/src/main/jastadd/Imports.jadd
@@ -1,5 +1,7 @@
 import java.util.*;
+import java.time.Instant;
 import de.tudresden.inf.st.eraser.util.MemberPrinter;
+import de.tudresden.inf.st.eraser.util.JavaUtils;
 
 aspect Imports {
 
diff --git a/eraser-base/src/main/jastadd/Item.jrag b/eraser-base/src/main/jastadd/Item.jrag
index 5a7804a3008e8f81e744c5bb557c3941e2523be6..07a45574afc6e2fd72c10530f51250dd6e685115 100644
--- a/eraser-base/src/main/jastadd/Item.jrag
+++ b/eraser-base/src/main/jastadd/Item.jrag
@@ -1,20 +1,19 @@
 aspect ItemHandling {
 
-  protected static final java.text.DateFormat Item.FORMAT = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS") {{
-    setTimeZone(TimeZone.getTimeZone("UTC"));
-  }};
   protected boolean Item.isFrozen = false;
   public void Item.freeze() { isFrozen = true; }
   public void Item.unfreeze() { isFrozen = false; }
+  public final boolean Item.isFrozen() { return isFrozen; }
 
   protected boolean Item.sendState = true;
   public void Item.disableSendState() { sendState = false; }
   public void Item.enableSendState() { sendState = true; }
+  public final boolean Item.isSendState() { return sendState; }
 
   //--- getStateAsString ---
   syn String Item.getStateAsString();
   eq ColorItem.getStateAsString() = getState().toString();
-  eq DateTimeItem.getStateAsString() = FORMAT.format(getState());
+  eq DateTimeItem.getStateAsString() = getState().toString();
   eq ItemWithBooleanState.getStateAsString() = Boolean.toString(getState());
   eq ItemWithDoubleState.getStateAsString() = Double.toString(getState());
   eq ItemWithStringState.getStateAsString() = getState();
@@ -24,9 +23,9 @@ aspect ItemHandling {
   // TupleHSB and String work like default
   eq ColorItem.getStateAsDouble() {
     logger.warn("getStateAsDouble called on item " + getLabel() + ". Using brightness.");
-    return getState().brightness;
+    return getState().getBrightness();
   }
-  eq DateTimeItem.getStateAsDouble() = getState().getTime();
+  eq DateTimeItem.getStateAsDouble() = getState().toEpochMilli();
   eq ItemWithBooleanState.getStateAsDouble() = getState() ? 1 : 0;
   eq ItemWithDoubleState.getStateAsDouble() = getState();
   eq ItemWithStringState.getStateAsDouble() {
@@ -51,12 +50,12 @@ aspect ItemHandling {
   }
   public void DateTimeItem.setStateFromString(String value) {
     try{
-      this.setState(FORMAT.parse(value));
-    } catch (java.text.ParseException e1) {
+      this.setState(Instant.parse(value));
+    } catch (java.time.format.DateTimeParseException e1) {
       // try to read input as number and use that
       try {
         long time = Long.parseLong(value);
-        this.setState(new Date(time));
+        this.setStateFromLong(time);
         // exit the method to avoid printing the error message for e1
         return;
       } catch (NumberFormatException e2) {
@@ -85,10 +84,10 @@ aspect ItemHandling {
   public abstract void Item.setStateFromLong(long value);
   public void ColorItem.setStateFromLong(long value) {
     // only set brightness
-    this.setState(TupleHSB.of(getState().hue, getState().saturation, Math.toIntExact(value)));
+    this.setState(getState().withDifferentBrightness(Math.toIntExact(value)));
   }
   public void DateTimeItem.setStateFromLong(long value) {
-    this.setState(new Date(value));
+    this.setState(Instant.ofEpochMilli(value));
   }
   public void ItemWithBooleanState.setStateFromLong(long value) {
     this.setState(value != 0);
@@ -113,7 +112,7 @@ aspect ItemHandling {
   }
   public void DateTimeItem.setStateFromBoolean(boolean value) {
     // there is no good way here
-    logger.warn("Ignoring color update using {} for {}", this, value);
+    logger.warn("Ignoring boolean update using {} for {}", value, this);
   }
   public void ItemWithBooleanState.setStateFromBoolean(boolean value) {
     this.setState(value);
@@ -137,7 +136,7 @@ aspect ItemHandling {
     setBrightness((int) value);
   }
   public void DateTimeItem.setStateFromDouble(double value) {
-    this.setState(new Date((int) value));
+    this.setStateFromLong((long) value);
   }
   public void ItemWithBooleanState.setStateFromDouble(double value) {
     this.setState(value != 0);
@@ -162,13 +161,13 @@ aspect ItemHandling {
   }
   public void DateTimeItem.setStateFromColor(TupleHSB value) {
     // there is no good way here
-    logger.warn("Ignoring color update using {} for {}", this, value);
+    logger.warn("Ignoring color update using {} for {}", value, this);
   }
   public void ItemWithBooleanState.setStateFromColor(TupleHSB value) {
-    this.setState(value != null && value.brightness > 0);
+    this.setState(value != null && value.getBrightness() > 0);
   }
   public void ItemWithDoubleState.setStateFromColor(TupleHSB value) {
-    this.setState(value.brightness);
+    this.setState(value.getBrightness());
   }
   public void ItemWithStringState.setStateFromColor(TupleHSB value) {
     this.setState(value.toString());
@@ -180,28 +179,28 @@ aspect ItemHandling {
     sendState = before;
   }
 
-  //--- setStateFromDate ---
-  public abstract void Item.setStateFromDate(Date value);
-  public void ColorItem.setStateFromDate(Date value) {
+  //--- setStateFromInstant ---
+  public abstract void Item.setStateFromInstant(Instant value);
+  public void ColorItem.setStateFromInstant(Instant value) {
     // there is no good way here
     logger.warn("Ignoring date update using {} for {}", this, value);
   }
-  public void DateTimeItem.setStateFromDate(Date value) {
-    this.setState(new Date(value.getTime()));
+  public void DateTimeItem.setStateFromInstant(Instant value) {
+    this.setState(value);
   }
-  public void ItemWithBooleanState.setStateFromDate(Date value) {
+  public void ItemWithBooleanState.setStateFromInstant(Instant value) {
     this.setState(value != null);
   }
-  public void ItemWithDoubleState.setStateFromDate(Date value) {
-    this.setState(value.getTime());
+  public void ItemWithDoubleState.setStateFromInstant(Instant value) {
+    this.setState(value.toEpochMilli());
   }
-  public void ItemWithStringState.setStateFromDate(Date value) {
-    this.setState(FORMAT.format(value));
+  public void ItemWithStringState.setStateFromInstant(Instant value) {
+    this.setState(value.toString());
   }
-  public void Item.setStateFromDate(Date value, boolean shouldSendState) {
+  public void Item.setStateFromInstant(Instant value, boolean shouldSendState) {
     boolean before = sendState;
     sendState = shouldSendState;
-    setStateFromDate(value);
+    setStateFromInstant(value);
     sendState = before;
   }
 
@@ -210,58 +209,58 @@ aspect ItemHandling {
   public String ItemWithStringState.getState() {   return get_state(); }
   public double ItemWithDoubleState.getState() {   return get_state(); }
   public TupleHSB ColorItem.getState() {           return get_state(); }
-  public Date DateTimeItem.getState() {            return get_state(); }
-
-  //--- copyState ---
-  public abstract Object Item.copyState();
-  // boolean can be copied
-  public Boolean ItemWithBooleanState.copyState() { return get_state(); }
-  public String ItemWithStringState.copyState() { return new String(get_state()); }
-  // long can be copied
-  public Double ItemWithDoubleState.copyState() { return get_state(); }
-  public TupleHSB ColorItem.copyState() { return get_state().clone(); }
-  public Object DateTimeItem.copyState() { return get_state().clone(); }
+  public Instant DateTimeItem.getState() {         return get_state(); }
 
   //--- setState(value) ---
   public void ItemWithBooleanState.setState(boolean value) { setState(value, sendState); }
   public void ItemWithStringState.setState(String value) { setState(value, sendState); }
   public void ItemWithDoubleState.setState(double value) { setState(value, sendState); }
   public void ColorItem.setState(TupleHSB value) { setState(value, sendState); }
-  public void DateTimeItem.setState(Date value) { setState(value, sendState); }
+  public void DateTimeItem.setState(Instant value) { setState(value, sendState); }
 
   //--- setState(value,shouldSendState) ---
   public void ItemWithBooleanState.setState(boolean value, boolean shouldSendState) {
-    if (isFrozen) { return; }
+    if (isFrozen || stateEquals(value)) { return; }
     set_state(value);
-    sendState0(shouldSendState);
+    stateUpdated(shouldSendState);
   }
 
   public void ItemWithStringState.setState(String value, boolean shouldSendState) {
-    if (isFrozen) { return; }
+    if (isFrozen || stateEquals(value)) { return; }
     set_state(value);
-    sendState0(shouldSendState);
+    stateUpdated(shouldSendState);
   }
 
   public void ItemWithDoubleState.setState(double value, boolean shouldSendState) {
-    if (isFrozen) { return; }
+    if (isFrozen || stateEquals(value)) { return; }
     set_state(value);
-    sendState0(shouldSendState);
+    stateUpdated(shouldSendState);
   }
 
   public void ColorItem.setState(TupleHSB value, boolean shouldSendState) {
-    if (isFrozen) { return; }
+    if (isFrozen || stateEquals(value)) { return; }
     set_state(value);
-    sendState0(shouldSendState);
+    stateUpdated(shouldSendState);
   }
 
-  public void DateTimeItem.setState(Date value, boolean shouldSendState) {
-    if (isFrozen) { return; }
+  public void DateTimeItem.setState(Instant value, boolean shouldSendState) {
+    if (isFrozen || stateEquals(value)) { return; }
     set_state(value);
-    sendState0(shouldSendState);
-  }
-
-  //--- sendState0 ---
-  protected void Item.sendState0(boolean shouldSendState) {
+    stateUpdated(shouldSendState);
+  }
+
+  //--- stateUpdated ---
+  /**
+   * Called whenever the state of an item is updated. Performs several actions, including:
+   * <ul>
+   *   <li>Send the new state via MQTT</li>
+   *   <li>Send the new state to InfluxDB</li>
+   *   <li>Notify the attached {@link ItemObserver}, if any</li>
+   *   <li>Update the state of controlled items</li>
+   * </ul>
+   * @param shouldSendState whether to send the new state (currently affects MQTT and Influx)
+   */
+  protected void Item.stateUpdated(boolean shouldSendState) {
     if (shouldSendState) {
       try {
         // sendState() defined in MQTT aspect
@@ -288,6 +287,9 @@ aspect ItemHandling {
   }
 
   public boolean ItemWithStringState.stateEquals(Object otherState) {
+    if (getState() == null) {
+      return otherState == null;
+    }
     return getState().equals(otherState);
   }
 
@@ -299,23 +301,29 @@ aspect ItemHandling {
   }
 
   public boolean ColorItem.stateEquals(Object otherState) {
+    if (getState() == null) {
+      return otherState == null;
+    }
     return getState().equals(otherState);
   }
 
   public boolean DateTimeItem.stateEquals(Object otherState) {
+    if (getState() == null) {
+      return otherState == null;
+    }
     return getState().equals(otherState);
   }
 
   //--- setStateToDefault ---
   public abstract void Item.setStateToDefault();
   public void ColorItem.setStateToDefault() { this.setState(TupleHSB.of(0, 0, 0)); }
-  public void DateTimeItem.setStateToDefault() { this.setState(new Date(0)); }
+  public void DateTimeItem.setStateToDefault() { this.setState(Instant.ofEpochSecond(0)); }
   public void ItemWithBooleanState.setStateToDefault() { this.setState(false); }
   public void ItemWithDoubleState.setStateToDefault() { this.setState(0.0); }
   public void ItemWithStringState.setStateToDefault() { this.setState(""); }
 
   //--- as$ItemType ---
-  // those attributes will raise a ClassCastException if called on the wrong item type. But else can we do?
+  // those attributes will raise a ClassCastException if called on the wrong item type. But what else can we do?
   syn ColorItem Item.asColorItem() = (ColorItem) this;
   syn ColorItem ColorItem.asColorItem() = this;
   syn ItemWithBooleanState Item.asItemWithBooleanState() = (ItemWithBooleanState) this;
@@ -342,13 +350,11 @@ aspect ItemHandling {
     controlling.setStateFromColor(this.getState());
   }
   protected void DateTimeItem.doUpdateFor(Item controlling) {
-    controlling.setStateFromDate(this.getState());
+    controlling.setStateFromInstant(this.getState());
   }
 
   private void ColorItem.setBrightness(int value) {
-    TupleHSB newState = getState().clone();
-    newState.brightness = value;
-    setState(newState);
+    setState(getState().withDifferentBrightness(value));
   }
 
   //--- ItemPreference.apply ---
diff --git a/eraser-base/src/main/jastadd/ItemHistoryPoints.jadd b/eraser-base/src/main/jastadd/ItemHistoryPoints.jadd
index f349743611f46e326a5514ae0ba3418653c70fc0..dcb050d0c8e09887a0809689ddfaa0a854d1edef 100644
--- a/eraser-base/src/main/jastadd/ItemHistoryPoints.jadd
+++ b/eraser-base/src/main/jastadd/ItemHistoryPoints.jadd
@@ -8,8 +8,8 @@ aspect ItemHistory{
           .build();
     }
     protected abstract org.influxdb.dto.Point.Builder createMeasurement();
-    public abstract java.time.Instant getTime();
-    public abstract void setTime(java.time.Instant time);
+    public abstract Instant getTime();
+    public abstract void setTime(Instant time);
     public abstract String getId();
     public abstract void setId(String id);
     public abstract T getState();
@@ -23,18 +23,18 @@ aspect ItemHistory{
 
   @org.influxdb.annotation.Measurement(name = BooleanStatePoint.NAME)
   public class BooleanStatePoint extends AbstractItemPoint<Boolean> {
-    @org.influxdb.annotation.Column(name = "time") protected java.time.Instant time;
+    @org.influxdb.annotation.Column(name = "time") protected Instant time;
     @org.influxdb.annotation.Column(name = "state") protected Boolean state;
     @org.influxdb.annotation.Column(name = "id", tag = true) protected String id;
-    public java.time.Instant getTime() { return time; }
-    public void setTime(java.time.Instant time) { this.time = time; }
+    public Instant getTime() { return time; }
+    public void setTime(Instant time) { this.time = time; }
     public String getId() { return id; }
     public void setId(String id) { this.id = id; }
     public void setState(Boolean state) { this.state = state; }
     public Boolean getState() { return state; }
 
     public static final String NAME = "ItemB";
-    public static BooleanStatePoint of(java.time.Instant time, Boolean state, String id) {
+    public static BooleanStatePoint of(Instant time, Boolean state, String id) {
       BooleanStatePoint result = new BooleanStatePoint();
       result.setTime(time); result.setState(state); result.setId(id);
       return result;
@@ -46,18 +46,18 @@ aspect ItemHistory{
 
   @org.influxdb.annotation.Measurement(name = StringStatePoint.NAME)
   public class StringStatePoint extends AbstractItemPoint<String> {
-    @org.influxdb.annotation.Column(name = "time") protected java.time.Instant time;
+    @org.influxdb.annotation.Column(name = "time") protected Instant time;
     @org.influxdb.annotation.Column(name = "state") protected String state;
     @org.influxdb.annotation.Column(name = "id", tag = true) protected String id;
-    public java.time.Instant getTime() { return time; }
-    public void setTime(java.time.Instant time) { this.time = time; }
+    public Instant getTime() { return time; }
+    public void setTime(Instant time) { this.time = time; }
     public String getId() { return id; }
     public void setId(String id) { this.id = id; }
     public void setState(String state) { this.state = state; }
     public String getState() { return state; }
 
     public static final String NAME = "ItemS";
-    public static StringStatePoint of(java.time.Instant time, String state, String id) {
+    public static StringStatePoint of(Instant time, String state, String id) {
       StringStatePoint result = new StringStatePoint();
       result.setTime(time); result.setState(state); result.setId(id);
       return result;
@@ -69,18 +69,18 @@ aspect ItemHistory{
 
   @org.influxdb.annotation.Measurement(name = DoubleStatePoint.NAME)
   public class DoubleStatePoint extends AbstractItemPoint<Double> {
-    @org.influxdb.annotation.Column(name = "time") protected java.time.Instant time;
+    @org.influxdb.annotation.Column(name = "time") protected Instant time;
     @org.influxdb.annotation.Column(name = "state") protected Double state;
     @org.influxdb.annotation.Column(name = "id", tag = true) protected String id;
-    public java.time.Instant getTime() { return time; }
-    public void setTime(java.time.Instant time) { this.time = time; }
+    public Instant getTime() { return time; }
+    public void setTime(Instant time) { this.time = time; }
     public String getId() { return id; }
     public void setId(String id) { this.id = id; }
     public void setState(Double state) { this.state = state; }
     public Double getState() { return state; }
 
     public static final String NAME = "ItemD";
-    public static DoubleStatePoint of(java.time.Instant time, Double state, String id) {
+    public static DoubleStatePoint of(Instant time, Double state, String id) {
       DoubleStatePoint result = new DoubleStatePoint();
       result.setTime(time); result.setState(state); result.setId(id);
       return result;
@@ -92,18 +92,18 @@ aspect ItemHistory{
 
   @org.influxdb.annotation.Measurement(name = ColorStatePoint.NAME)
   public class ColorStatePoint extends AbstractItemPoint<TupleHSB> {
-    @org.influxdb.annotation.Column(name = "time") protected java.time.Instant time;
+    @org.influxdb.annotation.Column(name = "time") protected Instant time;
     @org.influxdb.annotation.Column(name = "state") protected String state;
     @org.influxdb.annotation.Column(name = "id", tag = true) protected String id;
-    public java.time.Instant getTime() { return time; }
-    public void setTime(java.time.Instant time) { this.time = time; }
+    public Instant getTime() { return time; }
+    public void setTime(Instant time) { this.time = time; }
     public String getId() { return id; }
     public void setId(String id) { this.id = id; }
     public void setState(TupleHSB state) { this.state = state.toString(); }
     public TupleHSB getState() { return TupleHSB.parse(state); }
 
     public static final String NAME = "ItemC";
-    public static ColorStatePoint of(java.time.Instant time, TupleHSB state, String id) {
+    public static ColorStatePoint of(Instant time, TupleHSB state, String id) {
       ColorStatePoint result = new ColorStatePoint();
       result.setTime(time); result.setState(state); result.setId(id);
       return result;
@@ -114,19 +114,19 @@ aspect ItemHistory{
   }
 
   @org.influxdb.annotation.Measurement(name = DateTimeStatePoint.NAME)
-  public class DateTimeStatePoint extends AbstractItemPoint<Date> {
-    @org.influxdb.annotation.Column(name = "time") protected java.time.Instant time;
+  public class DateTimeStatePoint extends AbstractItemPoint<Instant> {
+    @org.influxdb.annotation.Column(name = "time") protected Instant time;
     @org.influxdb.annotation.Column(name = "state") protected long state;
     @org.influxdb.annotation.Column(name = "id", tag = true) protected String id;
-    public java.time.Instant getTime() { return time; }
-    public void setTime(java.time.Instant time) { this.time = time; }
+    public Instant getTime() { return time; }
+    public void setTime(Instant time) { this.time = time; }
     public String getId() { return id; }
     public void setId(String id) { this.id = id; }
-    public void setState(Date state) { this.state = state.getTime(); }
-    public Date getState() { return new Date(state); }
+    public void setState(Instant state) { this.state = state.toEpochMilli(); }
+    public Instant getState() { return Instant.ofEpochMilli(state); }
 
     public static final String NAME = "ItemT";
-    public static DateTimeStatePoint of(java.time.Instant time, Date state, String id) {
+    public static DateTimeStatePoint of(Instant time, Instant state, String id) {
       DateTimeStatePoint result = new DateTimeStatePoint();
       result.setTime(time); result.setState(state); result.setId(id);
       return result;
diff --git a/eraser-base/src/main/jastadd/Location.jrag b/eraser-base/src/main/jastadd/Location.jrag
new file mode 100644
index 0000000000000000000000000000000000000000..b61b260c998e15a89712e00f3e2c2a44eb84759b
--- /dev/null
+++ b/eraser-base/src/main/jastadd/Location.jrag
@@ -0,0 +1,10 @@
+aspect Location {
+  syn Optional<Location> Item.myLocation() {
+    if (this.hasLocation()) {
+      return Optional.of(this.getLocation());
+    } else {
+      return JavaUtils.ifPresentOrElseReturn(linkedThing(),
+          thing -> thing.hasLocation() ? Optional.of(thing.getLocation()) : Optional.empty(), () -> Optional.empty());
+    }
+  }
+}
diff --git a/eraser-base/src/main/jastadd/Location.relast b/eraser-base/src/main/jastadd/Location.relast
new file mode 100644
index 0000000000000000000000000000000000000000..3cdf199776adc6eaa575b31f867a16bae891e8d5
--- /dev/null
+++ b/eraser-base/src/main/jastadd/Location.relast
@@ -0,0 +1,3 @@
+Location ::= <Label:String> SubLocation:Location ;
+rel Location.Thing* <-> Thing.Location? ;
+rel Location.Item* <-> Item.Location? ;
diff --git a/eraser-base/src/main/jastadd/Logging.jadd b/eraser-base/src/main/jastadd/Logging.jadd
index 55c48fc56bbc88fe65274435b5b2da864f94487d..dd41bc70f125081ea2ab6291ae48fa0272eea4c4 100644
--- a/eraser-base/src/main/jastadd/Logging.jadd
+++ b/eraser-base/src/main/jastadd/Logging.jadd
@@ -6,7 +6,7 @@ aspect Logging {
   private org.apache.logging.log4j.Logger DummyMachineLearningModel.logger = org.apache.logging.log4j.LogManager.getLogger(DummyMachineLearningModel.class);
   private org.apache.logging.log4j.Logger Rule.logger = org.apache.logging.log4j.LogManager.getLogger(Rule.class);
   private org.apache.logging.log4j.Logger MqttRoot.logger = org.apache.logging.log4j.LogManager.getLogger(MqttRoot.class);
-  private org.apache.logging.log4j.Logger MachineLearningModel.logger = org.apache.logging.log4j.LogManager.getLogger(MachineLearningModel.class);
+  private org.apache.logging.log4j.Logger InternalMachineLearningModel.logger = org.apache.logging.log4j.LogManager.getLogger(InternalMachineLearningModel.class);
   private org.apache.logging.log4j.Logger NeuralNetworkRoot.logger = org.apache.logging.log4j.LogManager.getLogger(NeuralNetworkRoot.class);
   private org.apache.logging.log4j.Logger OutputLayer.logger = org.apache.logging.log4j.LogManager.getLogger(OutputLayer.class);
 }
diff --git a/eraser-base/src/main/jastadd/MachineLearning.jrag b/eraser-base/src/main/jastadd/MachineLearning.jrag
index 53942b474a2944a9dee8bdb524780c91c970d61c..3ad31d3eac046655aa5d60ad6c13ee382a686041 100644
--- a/eraser-base/src/main/jastadd/MachineLearning.jrag
+++ b/eraser-base/src/main/jastadd/MachineLearning.jrag
@@ -1,43 +1,39 @@
 aspect MachineLearning {
 
+  public static final MachineLearningRoot MachineLearningRoot.createDefault() {
+    MachineLearningRoot result = new MachineLearningRoot();
+    return result;
+  }
+
   public interface Leaf {
     String getLabel();
     int getActivityIdentifier();
     List<ItemPreference> computePreferences();
   }
 
-  syn Leaf MachineLearningModel.classify();
+  syn Leaf InternalMachineLearningModel.classify();
 
   //--- currentActivityName ---
-  syn String Root.currentActivityName() = getMachineLearningRoot().hasActivityRecognition() ? getMachineLearningRoot().getActivityRecognition().classify().getLabel() : "no activity";
+  syn String Root.currentActivityName() = JavaUtils.ifPresentOrElseReturn(
+      currentActivity(),
+      Activity::getLabel,
+      () -> "no activity"
+    );
 
   //--- currentActivity ---
-  syn java.util.Optional<Activity> Root.currentActivity() = resolveActivity(getMachineLearningRoot().hasActivityRecognition() ? getMachineLearningRoot().getActivityRecognition().classify().getActivityIdentifier() : -1);
-
-  //--- currentPreferences ---
-  syn List<ItemPreference> Root.currentPreferences() = getMachineLearningRoot().getPreferenceLearning().classify().computePreferences();
-
-  //--- computePreferences ---
-  syn List<ItemPreference> DecisionTreeLeaf.computePreferences() {
-    // iterate over preference of this leaf, and all its parents and ancestors
-    List<ItemPreference> result = new ArrayList<>();
-    Set<Item> seenItems = new HashSet<>();
-    List<DecisionTreeElement> ancestors = ancestors();
-    for (ItemPreference pref : getPreferenceList()) {
-      result.add(pref);
-      seenItems.add(pref.getItem());
-    }
-    for (DecisionTreeElement ancestor : ancestors) {
-      for (ItemPreference pref : ancestor.getPreferenceList()) {
-        if (!seenItems.contains(pref.getItem())) {
-          result.add(pref);
-          seenItems.add(pref.getItem());
-        }
-      }
+  syn java.util.Optional<Activity> Root.currentActivity() {
+    return resolveActivity((int) getOpenHAB2Model().getActivityItem().getState());
+  }
+  private int Root.extractActivityIdentifier(List<ItemPreference> preferences) {
+    if (preferences.isEmpty()) {
+      return -1;
     }
-    return result;
+    return (int) ((ItemPreferenceDouble) preferences.get(0)).getPreferredValue();
   }
 
+  //--- currentPreferences ---
+  syn List<ItemPreference> Root.currentPreferences() = getMachineLearningRoot().getPreferenceLearning().getDecoder().classify().getPreferences();
+
   //--- canSetActivity ---
   syn boolean MachineLearningModel.canSetActivity() = false;
   eq DummyMachineLearningModel.canSetActivity() = true;
@@ -49,31 +45,48 @@ aspect MachineLearning {
   }
 
   //--- DummyMachineLearningModel.classify ---
-  eq DummyMachineLearningModel.classify() = getCurrent();
+  eq DummyMachineLearningModel.classify() {
+    if (logger.isInfoEnabled() && getItemList().size() > 0) {
+      logger.info("Dummy classification of {}, values of connected items: {}",
+          mlKind(),
+          getItemList().stream()
+              .map(item -> item.getID() + ":" + item.getStateAsString())
+              .collect(java.util.stream.Collectors.toList()));
+    }
+    return getCurrent();
+  }
 
   //--- DummyMachineLearningModel.createDefault() ---
   public static DummyMachineLearningModel DummyMachineLearningModel.createDefault() {
-    DummyMachineLearningModel dmlm = new DummyMachineLearningModel();
+    DummyMachineLearningModel dummy = new DummyMachineLearningModel();
     DecisionTreeLeaf current = new DecisionTreeLeaf();
     current.setActivityIdentifier(0);
     current.setLabel("Dummy");
     // no item preference set
-    dmlm.setCurrent(current);
-    return dmlm;
+    dummy.setCurrent(current);
+    return dummy;
   }
 
   //--- connectItems ---
   public abstract void MachineLearningModel.connectItems(List<String> itemNames);
   public void DummyMachineLearningModel.connectItems(List<String> itemNames) {
-    logger.info("Ignoring items to connect");
+    logger.info("Storing items to connect");
+    for (String itemName : itemNames) {
+      JavaUtils.ifPresentOrElse(getRoot().getOpenHAB2Model().resolveItem(itemName),
+          this::addItem,
+          () -> logger.warn("Could not resolve item '{}'", itemName));
+    }
   }
 
   //--- check ---
   /**
-   * Checks the NeuralNetwork for all necessary children.
+   * Checks the ML model for all necessary children.
    * @return true, if everything is alright. false otherwise
    */
-  public boolean MachineLearningModel.check() {
+  public abstract boolean MachineLearningModel.check();
+
+  @Override
+  public boolean InternalMachineLearningModel.check() {
     boolean good = true;
     if (getOutputApplication() == null) {
       logger.warn("{}: OutputApplication function is null!", mlKind());
@@ -97,8 +110,54 @@ aspect MachineLearning {
     return good;
   }
 
+  @Override
+  public boolean ExternalMachineLearningModel.check() {
+    throw new UnsupportedOperationException("check not available for external ML models (yet)!");
+  }
+
+  //--- mlKind ---
   inh String MachineLearningModel.mlKind();
   eq MachineLearningRoot.getActivityRecognition().mlKind() = "ActivityRecognition";
   eq MachineLearningRoot.getPreferenceLearning().mlKind() = "PreferenceLearning";
 
+  //... ExternalMachineLearningModel ...
+  private MachineLearningEncoder ExternalMachineLearningModel.encoder;
+  public void ExternalMachineLearningModel.setEncoder(MachineLearningEncoder encoder) {
+    this.encoder = encoder;
+  }
+  private MachineLearningDecoder ExternalMachineLearningModel.decoder;
+  public void ExternalMachineLearningModel.setDecoder(MachineLearningDecoder decoder) {
+    this.decoder = decoder;
+  }
+//  eq ExternalMachineLearningModel.classify() = null;
+  public void ExternalMachineLearningModel.connectItems(List<String> itemNames) { }
+
+  //... InternalMachineLearningModel ...
+  syn InternalMachineLearningHandler InternalMachineLearningModel.handler() {
+    return new InternalMachineLearningHandler().setModel(this);
+  }
+  cache InternalMachineLearningModel.handler();
+
+  //--- getEncoder ---
+  public abstract MachineLearningEncoder MachineLearningModel.getEncoder();
+  @Override
+  public MachineLearningEncoder InternalMachineLearningModel.getEncoder() {
+    return handler();
+  }
+  @Override
+  public MachineLearningEncoder ExternalMachineLearningModel.getEncoder() {
+    return this.encoder;
+  }
+
+  //--- getDecoder ---
+  public abstract MachineLearningDecoder MachineLearningModel.getDecoder();
+  @Override
+  public MachineLearningDecoder InternalMachineLearningModel.getDecoder() {
+    return handler();
+  }
+  @Override
+  public MachineLearningDecoder ExternalMachineLearningModel.getDecoder() {
+    return this.decoder;
+  }
+
 }
diff --git a/eraser-base/src/main/jastadd/MachineLearning.relast b/eraser-base/src/main/jastadd/MachineLearning.relast
new file mode 100644
index 0000000000000000000000000000000000000000..5bfeaddda253ba37b4d730dfd3638e2c1b1b70d5
--- /dev/null
+++ b/eraser-base/src/main/jastadd/MachineLearning.relast
@@ -0,0 +1,26 @@
+// ----------------    Machine Learning Model    ------------------------------
+MachineLearningRoot ::= [ActivityRecognition:MachineLearningModel] [PreferenceLearning:MachineLearningModel] Activity* ChangeEvent* ;
+
+Activity ::= <Identifier:int> <Label:String> ;
+
+abstract ChangeEvent ::= <Identifier:int> <Timestamp:long> ChangedItem* ;
+
+ChangedItem ::= <NewStateAsString:String> ;
+rel ChangedItem.Item -> Item ;
+
+RecognitionEvent : ChangeEvent ;
+rel RecognitionEvent.Activity -> Activity ;
+
+ManualChangeEvent : ChangeEvent ;
+
+abstract MachineLearningModel ::= ;
+ExternalMachineLearningModel : MachineLearningModel ;
+abstract InternalMachineLearningModel : MachineLearningModel ::= <OutputApplication:DoubleDoubleFunction> ;
+rel InternalMachineLearningModel.RelevantItem* <-> Item.RelevantInMachineLearningModel* ;
+rel InternalMachineLearningModel.TargetItem* <-> Item.TargetInMachineLearningModel* ;
+
+abstract ItemPreference ::= ;
+rel ItemPreference.Item -> Item ;
+
+ItemPreferenceColor : ItemPreference ::= <PreferredHSB:TupleHSB> ;
+ItemPreferenceDouble : ItemPreference ::= <PreferredValue:double> ;
diff --git a/eraser-base/src/main/jastadd/ModelStatistics.jrag b/eraser-base/src/main/jastadd/ModelStatistics.jrag
index 293fcd920a69bbdf3393add6fd62c451d0abe87a..443635f1e2f3ceb71785e5977d43ba27f55c2b6c 100644
--- a/eraser-base/src/main/jastadd/ModelStatistics.jrag
+++ b/eraser-base/src/main/jastadd/ModelStatistics.jrag
@@ -1,7 +1,7 @@
 aspect ModelStatistics {
 
   //--- numChannels ---
-  syn int Root.numChannels() {
+  syn int OpenHAB2Model.numChannels() {
     int sum = 0;
     for (Thing thing : getThingList()) {
       sum += thing.getNumChannel();
@@ -10,7 +10,7 @@ aspect ModelStatistics {
   }
 
   //--- description ---
-  syn String Root.description() = "["
+  syn String OpenHAB2Model.description() = "["
     + this.getNumThingType() + " thing type(s), "
     + this.getNumChannelType() + " channel type(s), "
     + this.numChannels() + " channel(s), "
diff --git a/eraser-base/src/main/jastadd/Navigation.jrag b/eraser-base/src/main/jastadd/Navigation.jrag
index bb57ce614c40c68bf1afc4bc53680d3c9a3a8884..69592c1bf0b980b6287cb28d10b7b131bd4383ef 100644
--- a/eraser-base/src/main/jastadd/Navigation.jrag
+++ b/eraser-base/src/main/jastadd/Navigation.jrag
@@ -1,36 +1,36 @@
 aspect Navigation {
 
-  syn Comparator<ModelElement> Root.modelElementComparator() {
+  syn Comparator<ModelElement> OpenHAB2Model.modelElementComparator() {
     return (e1, e2) -> (e1.getID().compareTo(e2.getID()));
   }
 
   //--- items ---
-  syn java.util.List<Item> Root.items() {
+  syn java.util.List<Item> OpenHAB2Model.items() {
     java.util.List<Item> result = new java.util.ArrayList<>();
     addItems(result, getGroupList());
     return result;
   }
 
-  private void Root.addItems(java.util.List<Item> result, JastAddList<Group> groups) {
+  private void OpenHAB2Model.addItems(java.util.List<Item> result, JastAddList<Group> groups) {
     groups.forEach(group -> group.getItemList().forEach(item -> result.add(item)));
   }
 
   //--- parameters ---
-  syn java.util.Set<Parameter> Root.parameters() {
+  syn java.util.Set<Parameter> OpenHAB2Model.parameters() {
     java.util.Set<Parameter> result = new java.util.TreeSet<>(modelElementComparator());
     getThingTypeList().forEach(tt -> tt.getParameterList().forEach(parameter -> result.add(parameter)));
     return result;
   }
 
   //--- channels ---
-  syn java.util.Set<Channel> Root.channels() {
+  syn java.util.Set<Channel> OpenHAB2Model.channels() {
     java.util.Set<Channel> result = new java.util.TreeSet<>(modelElementComparator());
     getThingList().forEach(thing -> thing.getChannelList().forEach(channel -> result.add(channel)));
     return result;
   }
 
   //--- resolveThingType ---
-  syn java.util.Optional<ThingType> Root.resolveThingType(String thingTypeId) {
+  syn java.util.Optional<ThingType> OpenHAB2Model.resolveThingType(String thingTypeId) {
     for (ThingType thingType : this.getThingTypeList()) {
       if (thingType.getID().equals(thingTypeId)) {
         return java.util.Optional.of(thingType);
@@ -40,7 +40,7 @@ aspect Navigation {
   }
 
   //--- resolveChannel ---
-  syn java.util.Optional<Channel> Root.resolveChannel(String channelId) {
+  syn java.util.Optional<Channel> OpenHAB2Model.resolveChannel(String channelId) {
     for (Thing thing : this.getThingList()) {
       for (Channel channel : thing.getChannelList()) {
         if (channel.getID().equals(channelId)) {
@@ -52,7 +52,7 @@ aspect Navigation {
   }
 
   //--- resolveChannelType ---
-  syn java.util.Optional<ChannelType> Root.resolveChannelType(String channelTypeId) {
+  syn java.util.Optional<ChannelType> OpenHAB2Model.resolveChannelType(String channelTypeId) {
     for (ChannelType channelType : this.getChannelTypeList()) {
       if (channelType.getID().equals(channelTypeId)) {
         return java.util.Optional.of(channelType);
@@ -62,7 +62,10 @@ aspect Navigation {
   }
 
   //--- resolveItem ---
-  syn java.util.Optional<Item> Root.resolveItem(String itemId) {
+  syn java.util.Optional<Item> OpenHAB2Model.resolveItem(String itemId) {
+    if ("activity".equals(itemId)) {
+      return Optional.of(getActivityItem());
+    }
     for (Item item : items()) {
       if (item.getID().equals(itemId)) {
         return java.util.Optional.of(item);
@@ -72,7 +75,7 @@ aspect Navigation {
   }
 
   //--- resolveGroup ---
-  syn java.util.Optional<Group> Root.resolveGroup(String groupId) {
+  syn java.util.Optional<Group> OpenHAB2Model.resolveGroup(String groupId) {
     for (Group group : this.getGroupList()) {
       if (group.getID().equals(groupId)) {
         return java.util.Optional.of(group);
@@ -87,7 +90,7 @@ aspect Navigation {
   }
 
   //--- resolveItemCategory ---
-  syn java.util.Optional<ItemCategory> Root.resolveItemCategory(String categoryName) {
+  syn java.util.Optional<ItemCategory> OpenHAB2Model.resolveItemCategory(String categoryName) {
     for (ItemCategory category : getItemCategoryList()) {
       if (category.getName().equals(categoryName)) {
         return java.util.Optional.of(category);
@@ -116,10 +119,6 @@ aspect Navigation {
     return java.util.Optional.empty();
   }
 
-  //--- containingChannel ---
-  inh Channel Link.containingChannel();
-  eq Channel.getLink().containingChannel() = this;
-
   //--- containingThing ---
   inh Thing Channel.containingThing();
   eq Thing.getChannel().containingThing() = this;
@@ -128,17 +127,23 @@ aspect Navigation {
   inh NeuralNetworkRoot OutputLayer.containingNeuralNetwork();
   eq NeuralNetworkRoot.getOutputLayer().containingNeuralNetwork() = this;
 
+  //--- linkedThing ---
+  syn Optional<Thing> Item.linkedThing() {
+    if (!this.hasChannel()) {
+      return Optional.empty();
+    }
+    Channel channel = this.getChannel();
+    Thing thing = channel.containingThing();
+    return Optional.of(thing);
+  }
+
   //--- getRoot ---
   inh Root ASTNode.getRoot();
-  eq Root.getChannelCategory().getRoot() = this;
+  eq Root.getOpenHAB2Model().getRoot() = this;
   eq Root.getMqttRoot().getRoot() = this;
   eq Root.getInfluxRoot().getRoot() = this;
   eq Root.getMachineLearningRoot().getRoot() = this;
-  eq Root.getThing().getRoot() = this;
-  eq Root.getGroup().getRoot() = this;
-  eq Root.getThingType().getRoot() = this;
-  eq Root.getChannelType().getRoot() = this;
   eq Root.getRule().getRoot() = this;
-  eq Root.getItemCategory().getRoot() = this;
   eq Root.getUser().getRoot() = this;
+  eq Root.getLocation().getRoot() = this;
 }
diff --git a/eraser-base/src/main/jastadd/NeuralNetwork.jrag b/eraser-base/src/main/jastadd/NeuralNetwork.jrag
index a5d04cee57e017d037d3defb882dbf0560f1c0c1..e673b69a1ea95c386320630fae1c8152cfe9cf78 100644
--- a/eraser-base/src/main/jastadd/NeuralNetwork.jrag
+++ b/eraser-base/src/main/jastadd/NeuralNetwork.jrag
@@ -57,7 +57,7 @@ aspect NeuralNetwork {
   //--- value ---
   syn double Neuron.value();
 
-  syn double HiddenNeuron.value() {
+  eq HiddenNeuron.value() {
     double[] inputs = new double[getInputs().size()];
     for (int i=0; i<inputs.length; ++i) {
       NeuronConnection connection = getInputList().get(i);
@@ -67,9 +67,11 @@ aspect NeuralNetwork {
     double result = getActivationFormula().apply(inputs);
 //    logger.debug("{}: {} -> {}", this, java.util.Arrays.toString(inputs), result);
     return result;
-    }
+  }
+
+  eq BiasNeuron.value() = 1;
 
-  syn double InputNeuron.value() {
+  eq InputNeuron.value() {
     return getItem().getStateAsDouble();
   }
 
@@ -98,7 +100,7 @@ aspect NeuralNetwork {
       }
       String itemName = itemNames.get(i);
       InputNeuron neuron = getInputNeuron(i);
-      de.tudresden.inf.st.eraser.util.JavaUtils.ifPresentOrElse(getRoot().resolveItem(itemName),
+      JavaUtils.ifPresentOrElse(getRoot().getOpenHAB2Model().resolveItem(itemName),
           neuron::setItem,
           () -> logger.warn("Could not resolve item '{}'", itemName));
     }
@@ -165,6 +167,12 @@ aspect NeuralNetwork {
     return good;
   }
 
+  @Override
+  public boolean BiasNeuron.check() {
+    setActivationFormula(inputs -> 1.0);
+    return super.check();
+  }
+
   //--- mlKind ---
   inh String OutputLayer.mlKind();
   inh String Neuron.mlKind();
diff --git a/eraser-base/src/main/jastadd/NeuralNetwork.relast b/eraser-base/src/main/jastadd/NeuralNetwork.relast
new file mode 100644
index 0000000000000000000000000000000000000000..1214eb199e7ae39e9bae4754eabe6ee0fcd14ae3
--- /dev/null
+++ b/eraser-base/src/main/jastadd/NeuralNetwork.relast
@@ -0,0 +1,20 @@
+// ----------------    Neural Network    ------------------------------
+NeuralNetworkRoot : InternalMachineLearningModel ::= InputNeuron* HiddenNeuron* OutputLayer ;
+
+OutputLayer ::= OutputNeuron* <Combinator:DoubleArrayDoubleFunction> ;
+rel OutputLayer.AffectedItem -> Item ;
+
+abstract Neuron ::= Output:NeuronConnection* ;
+
+NeuronConnection ::= <Weight:double> ;
+rel NeuronConnection.Neuron <-> Neuron.Input* ;
+
+InputNeuron : Neuron ;
+rel InputNeuron.Item -> Item ;
+
+HiddenNeuron : Neuron ::= <ActivationFormula:DoubleArrayDoubleFunction> ;
+BiasNeuron : HiddenNeuron ;
+OutputNeuron : HiddenNeuron ::= <Label:String> ;
+
+DummyMachineLearningModel : InternalMachineLearningModel ::= Current:DecisionTreeLeaf ;
+rel DummyMachineLearningModel.Item* -> Item ;
diff --git a/eraser-base/src/main/jastadd/Printing.jrag b/eraser-base/src/main/jastadd/Printing.jrag
index 106332708ec4239f0cf044ba562417c4e0aa06a2..53206bc16b2a09708a33386393ade1f3310e1666 100644
--- a/eraser-base/src/main/jastadd/Printing.jrag
+++ b/eraser-base/src/main/jastadd/Printing.jrag
@@ -3,8 +3,17 @@ aspect Printing {
 
   String ASTNode.safeID(ModelElement elem) { return elem == null ? "NULL" : elem.getID(); }
 
-//Root ::= Thing* Item* Group* ThingType* ChannelType* MqttRoot ;
   syn String Root.prettyPrint() {
+    StringBuilder sb = new StringBuilder();
+    sb.append(getOpenHAB2Model().prettyPrint());
+    sb.append(getMqttRoot().prettyPrint());
+    sb.append(getInfluxRoot().prettyPrint());
+    sb.append(getMachineLearningRoot().prettyPrint());
+    return sb.toString();
+  }
+
+//--- OpenHAB2Model.prettyPrint() ---
+  syn String OpenHAB2Model.prettyPrint() {
     StringBuilder sb = new StringBuilder();
     for (Thing t : getThingList()) {
       sb.append(t.prettyPrint());
@@ -27,8 +36,6 @@ aspect Printing {
     for (Channel c : channels()) {
       sb.append(c.prettyPrint());
     }
-    sb.append(getMqttRoot().prettyPrint());
-    sb.append(getInfluxRoot().prettyPrint());
     return sb.toString();
   }
 
@@ -49,9 +56,10 @@ aspect Printing {
         .addNonDefault("label", getLabel())
         .addRequired("state", getStateAsString())
         .addOptional("category", hasCategory(), () -> getCategory().getName())
-        .addOptional("topic", getTopic() != null, () -> getTopic().allParts())
+        .addOptional("topic", hasTopic(), () -> getTopic().getTopicString())
         .addIds("controls", getControllingList())
-        .addNodes("metaData", getNumMetaData(), getMetaDataList(), md -> "\"" + md.getKey() + "\":\"" + md.getValue() + "\"",
+        .addNodes("metaData", getNumMetaData(), getMetaDataList(),
+                  md -> "\"" + md.getKey() + "\":\"" + md.getValue() + "\"",
                   MemberPrinter.ListBracketType.CURLY)
         .build();
   }
@@ -68,8 +76,24 @@ aspect Printing {
   eq RollerShutterItem.prettyPrintType() = "RollerShutter Item" ;
   eq StringItem.prettyPrintType() = "String Item" ;
   eq SwitchItem.prettyPrintType() = "Switch Item" ;
+  eq ActivityItem.prettyPrintType() = "Activity Item" ;
   eq DefaultItem.prettyPrintType() = "Item" ;
 
+  // special ActivityItem printing. Always omit state.
+  syn String ActivityItem.prettyPrint() {
+    return new MemberPrinter(prettyPrintType())
+        .addRequired("id", getID())
+        .addNonDefault("label", getLabel())
+        .addOptional("category", hasCategory(), () -> getCategory().getName())
+        .addOptional("topic", hasTopic(), () -> getTopic().getTopicString())
+        .addIds("controls", getControllingList())
+        .addNodes("metaData", getNumMetaData(), getMetaDataList(),
+                  md -> "\"" + md.getKey() + "\":\"" + md.getValue() + "\"",
+                  MemberPrinter.ListBracketType.CURLY)
+        .build();
+  }
+
+
 //Group: id="" groups=["GROUP_ID", "GROUP_ID"] items=["ITEM_ID", "ITEM_ID"] aggregation=AGG;
 //       AGG either '"agg-name"', or '"agg-name" ("param1", "param2")'
   syn String Group.prettyPrint() {
@@ -142,7 +166,7 @@ aspect Printing {
     return new MemberPrinter("Channel")
         .addRequired("id", getID())
         .addRequired("type", getType(), ChannelType::getID)
-        .addIds("links", getNumLink(), getLinkList(), Link::getItem)
+        .addIds("links", getLinkedItems())
         .build();
   }
 
@@ -174,8 +198,13 @@ aspect Printing {
         .build();
   }
 
-  private boolean InfluxRoot.nonDefault(String actual, String expected) {
-    return actual != null && !actual.equals(expected);
+// Activities: { index: "name" }
+  syn String MachineLearningRoot.prettyPrint() {
+    return new MemberPrinter("ML")
+        .addNodes("activities", getNumActivity(), getActivityList(),
+                  activity -> activity.getIdentifier() + ":\"" + activity.getLabel() + "\"",
+                  MemberPrinter.ListBracketType.CURLY)
+        .build();
   }
 
 }
diff --git a/eraser-base/src/main/jastadd/Rules.jrag b/eraser-base/src/main/jastadd/Rules.jrag
index 0a3d508c146d9c325bd067cf4d0a915c276ceb39..69ae87d306a004978c0638c59ebba11335c2a27d 100644
--- a/eraser-base/src/main/jastadd/Rules.jrag
+++ b/eraser-base/src/main/jastadd/Rules.jrag
@@ -1,18 +1,10 @@
 aspect Rules {
 
-  Object ItemObserver.last_known_state = null;
-
-  // idea: abuse dependency tracking to trigger rule only iff item state has changed
-  syn boolean ItemObserver.apply() {
-    if (!observedItem().stateEquals(last_known_state)) {
-      // state has changed
-      last_known_state = observedItem().copyState();
-      for (Rule rule : getTriggeredRuleList()) {
-        rule.trigger(observedItem());
-      }
-      return true;
+  public void ItemObserver.apply() {
+    // state has changed, so trigger rules
+    for (Rule rule : getTriggeredRuleList()) {
+      rule.trigger(observedItem());
     }
-    return false;
   }
 
   public void Rule.trigger(Item triggeringItem) {
@@ -46,7 +38,6 @@ aspect Rules {
     } else {
       itemObserver = new ItemObserver();
       item.setItemObserver(itemObserver);
-      itemObserver.last_known_state = item.copyState();
     }
     // 2) Link event and itemObserver
     itemObserver.addTriggeredRule(this);
diff --git a/eraser-base/src/main/jastadd/Rules.relast b/eraser-base/src/main/jastadd/Rules.relast
new file mode 100644
index 0000000000000000000000000000000000000000..69854a70199e2d08dc3baa9fc1f49f2ac5ef2999
--- /dev/null
+++ b/eraser-base/src/main/jastadd/Rules.relast
@@ -0,0 +1,25 @@
+// --- New ECA rules ---
+Rule ::= Condition* Action* ;
+abstract Condition ;
+ItemStateCheckCondition : Condition ::= ItemStateCheck ;
+abstract Action ;
+LambdaAction : Action ::= <Lambda:Action2EditConsumer> ;
+
+TriggerRuleAction : Action ;
+rel TriggerRuleAction.Rule -> Rule ;
+
+abstract SetStateAction : Action ;
+rel SetStateAction.AffectedItem -> Item ;
+
+SetStateFromConstantStringAction : SetStateAction ::= <NewState:String> ;
+SetStateFromLambdaAction : SetStateAction ::= <NewStateProvider:NewStateProvider> ;
+SetStateFromTriggeringItemAction : SetStateAction ::= ;
+
+SetStateFromItemsAction : SetStateAction ::= <Combinator:ItemsToStringFunction> ;
+rel SetStateFromItemsAction.SourceItem* -> Item ;
+
+AddDoubleToStateAction : SetStateAction ::= <Increment:double> ;
+MultiplyDoubleToStateAction : SetStateAction ::= <Multiplier:double> ;
+
+ItemObserver ::= ;
+rel ItemObserver.TriggeredRule* <-> Rule.Observer* ;
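+
+// Construction sketch (hypothetical; assumes JastAdd's standard generated setters/adders
+// for the children, tokens, and relations declared above):
+//   Rule rule = new Rule();
+//   SetStateFromConstantStringAction action = new SetStateFromConstantStringAction();
+//   action.setNewState("ON");           // token <NewState:String>
+//   action.setAffectedItem(someItem);   // rel SetStateAction.AffectedItem -> Item
+//   rule.addAction(action);
+//   itemObserver.addTriggeredRule(rule); // rel ItemObserver.TriggeredRule* <-> Rule.Observer*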
diff --git a/eraser-base/src/main/jastadd/Util.jrag b/eraser-base/src/main/jastadd/Util.jrag
index b7e252afd51fda93273eba63e745483f7c78df59..282a44413c80ee2b5c5d71dca179ce4cf45c1fde 100644
--- a/eraser-base/src/main/jastadd/Util.jrag
+++ b/eraser-base/src/main/jastadd/Util.jrag
@@ -5,19 +5,31 @@ aspect Util {
 //    return new ExternalHost(hostName, 1883);
 //  }
   public void MqttRoot.setHostByName(String hostName) {
-    setHost(new ExternalHost(hostName, 1883));
+    setHost(ExternalHost.of(hostName, DEFAULT_PORT));
   }
 
   public void InfluxRoot.setHostByName(String hostName) {
-    setHost(new ExternalHost(hostName, 8086));
+    setHost(ExternalHost.of(hostName, DEFAULT_PORT));
+  }
+
+  public static ExternalHost ExternalHost.of(String hostName, int defaultPort) {
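+    // Accept either "host" or "host:port"; if no port is given, fall back to the supplied default,
+    // e.g. "broker.local" -> ("broker.local", defaultPort), "broker.local:1884" -> ("broker.local", 1884).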
+    String host = hostName;
+    int port = defaultPort;
+    if (hostName.contains(":")) {
+      String[] parts = hostName.split(":");
+      host = parts[0];
+      port = Integer.parseInt(parts[1]);
+    }
+    return new ExternalHost(host, port);
   }
 
   syn String ExternalHost.urlAsString() = String.format("http://%s:%s", getHostName(), getPort());
 
   public static Root Root.createEmptyRoot() {
     Root model = new Root();
+    model.setOpenHAB2Model(new OpenHAB2Model());
     model.setMqttRoot(new MqttRoot());
-    model.setInfluxRoot(new InfluxRoot());
+    model.setInfluxRoot(InfluxRoot.createDefault());
     model.setMachineLearningRoot(new MachineLearningRoot());
     return model;
   }
diff --git a/eraser-base/src/main/jastadd/eraser.flex b/eraser-base/src/main/jastadd/eraser.flex
index 57627fc2a3fec198c70cc9f89b6a30c44745b374..58d16b9c919ba44d71366752650beca252df557b 100644
--- a/eraser-base/src/main/jastadd/eraser.flex
+++ b/eraser-base/src/main/jastadd/eraser.flex
@@ -33,7 +33,7 @@ WhiteSpace = [ ] | \t | \f | \n | \r | \r\n
 //Identifier = [:jletter:][:jletterdigit:]*
 Text = \" ([^\"]*) \"
 
-//Integer = [:digit:]+ // | "+" [:digit:]+ | "-" [:digit:]+
+Integer = [:digit:]+ // | "+" [:digit:]+ | "-" [:digit:]+
 //Real    = [:digit:]+ "." [:digit:]* | "." [:digit:]+
 
 Comment = "//" [^\n\r]+
@@ -55,7 +55,9 @@ Comment = "//" [^\n\r]+
 "Channel"      { return sym(Terminals.CHANNEL); }
 "Mqtt"         { return sym(Terminals.MQTT); }
 "Influx"       { return sym(Terminals.INFLUX); }
+"ML"           { return sym(Terminals.ML); }
 // special items (group already has a token definition)
+"Activity"     { return sym(Terminals.ACTIVITY); }
 "Color"        { return sym(Terminals.COLOR); }
 "Contact"      { return sym(Terminals.CONTACT); }
 "DateTime"     { return sym(Terminals.DATE_TIME); }
@@ -68,6 +70,7 @@ Comment = "//" [^\n\r]+
 "String"       { return sym(Terminals.STRING); }
 "Switch"       { return sym(Terminals.SWITCH); }
 // within specification
+"activities"   { return sym(Terminals.ACTIVITIES); }
 "aggregation"  { return sym(Terminals.AGGREGATION); }
 "category"     { return sym(Terminals.CATEGORY); }
 "channels"     { return sym(Terminals.CHANNELS); }
@@ -110,5 +113,7 @@ Comment = "//" [^\n\r]+
 //{Identifier}   { return sym(Terminals.NAME); }
 {Text}         { return symText(Terminals.TEXT); }
 //{Real}         { return sym(Terminals.REAL); }
-//{Integer}      { return sym(Terminals.INTEGER); }
+{Integer}      { return sym(Terminals.INTEGER); }
 <<EOF>>        { return sym(Terminals.EOF); }
+/* error fallback */
+[^]            { throw new Error("Illegal character '"+ yytext() +"' at line " + (yyline+1) + " column " + (yycolumn+1)); }
diff --git a/eraser-base/src/main/jastadd/eraser.parser b/eraser-base/src/main/jastadd/eraser.parser
index 2994b762e69f83a61986c0e843cc46710a47289d..acd54d7c68b1aca2eb8d811d5da685e37347d0a4 100644
--- a/eraser-base/src/main/jastadd/eraser.parser
+++ b/eraser-base/src/main/jastadd/eraser.parser
@@ -25,24 +25,26 @@ import java.util.HashMap;
 %goal goal;
 
 Root goal =
-     thing.t goal.r           {: insertZero(r.getThingList(), t); return r; :}
-  |  item.i goal.r            {: return r; :}
-  |  group.g goal.r           {: insertZero(r.getGroupList(), g); return r; :}
-  |  thing_type.tt goal.r     {: insertZero(r.getThingTypeList(), tt); return r; :}
-  |  parameter goal.r         {: return r; :}
-  |  channel_type.ct goal.r   {: insertZero(r.getChannelTypeList(), ct); return r; :}
-  |  channel.c goal.r         {: return r; :}
-  |  mqtt_root.mr goal.r      {: r.setMqttRoot(mr); return r; :}
-  |  influx_root.ir goal.r    {: r.setInfluxRoot(ir); return r; :}
-  |  thing.t           {: return eph.createRoot(t); :}
-  |  item.i            {: return eph.createRoot(); :}
-  |  group.g           {: return eph.createRoot(g); :}
-  |  thing_type.tt     {: return eph.createRoot(tt); :}
-  |  parameter         {: return eph.createRoot(); :}
-  |  channel_type.ct   {: return eph.createRoot(ct); :}
-  |  channel.c         {: return eph.createRoot(); :}
-  |  mqtt_root.mr      {: return eph.createRoot(mr); :}
-  |  influx_root.ir    {: return eph.createRoot(ir); :}
+     thing.t goal.r                     {: insertZero(r.getOpenHAB2Model().getThingList(), t); return r; :}
+  |  item.i goal.r                      {: return r; :}
+  |  group.g goal.r                     {: insertZero(r.getOpenHAB2Model().getGroupList(), g); return r; :}
+  |  thing_type.tt goal.r               {: insertZero(r.getOpenHAB2Model().getThingTypeList(), tt); return r; :}
+  |  parameter goal.r                   {: return r; :}
+  |  channel_type.ct goal.r             {: insertZero(r.getOpenHAB2Model().getChannelTypeList(), ct); return r; :}
+  |  channel.c goal.r                   {: return r; :}
+  |  mqtt_root.mr goal.r                {: r.setMqttRoot(mr); return r; :}
+  |  influx_root.ir goal.r              {: r.setInfluxRoot(ir); return r; :}
+  |  machine_learning_root.ml goal.r    {: r.setMachineLearningRoot(ml); return r; :}
+  |  thing.t                            {: return eph.createRoot(t); :}
+  |  item.i                             {: return eph.createRoot(); :}
+  |  group.g                            {: return eph.createRoot(g); :}
+  |  thing_type.tt                      {: return eph.createRoot(tt); :}
+  |  parameter                          {: return eph.createRoot(); :}
+  |  channel_type.ct                    {: return eph.createRoot(ct); :}
+  |  channel.c                          {: return eph.createRoot(); :}
+  |  mqtt_root.mr                       {: return eph.createRoot(mr); :}
+  |  influx_root.ir                     {: return eph.createRoot(ir); :}
+  |  machine_learning_root.ml           {: return eph.createRoot(ml); :}
   ;
 
 Thing thing =
@@ -73,10 +75,11 @@ Item item =
   |  ROLLER_SHUTTER ITEM COLON item_body.ib SEMICOLON {: return eph.retype(new RollerShutterItem(), ib); :}
   |  STRING ITEM COLON item_body.ib SEMICOLON         {: return eph.retype(new StringItem(), ib); :}
   |  SWITCH ITEM COLON item_body.ib SEMICOLON         {: return eph.retype(new SwitchItem(), ib); :}
+  |  ACTIVITY ITEM COLON item_body.ib SEMICOLON       {: return eph.retype(new ActivityItem(), ib); :}
   |  ITEM COLON item_body.ib SEMICOLON                {: return eph.retype(new DefaultItem(), ib); :}
   ;
 
-// ITEM_TYPE Item: id="" label="" state="" category="" topic="" controls=["ITEM_ID"] metaData=["key":"value"] ;
+// ITEM_TYPE Item: id="" label="" state="" category="" topic="" controls=["ITEM_ID"] metaData={"key":"value"} ;
 Item item_body =
      ID EQUALS TEXT.n item_body.i       {: return eph.setID(i, n); :}
   |  LABEL EQUALS TEXT.n item_body.i    {: i.setLabel(n); return i; :}
@@ -194,8 +197,20 @@ InfluxRoot influx_root_body =
   |                                                  {: return InfluxRoot.createDefault(); :}
   ;
 
+// Machine Learning
+MachineLearningRoot machine_learning_root =
+     ML COLON machine_learning_root_body.b SEMICOLON     {: return b; :}
+  ;
+
+// ML: activities={index:"name"} ;
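+//   e.g.: ML: activities={0:"sleeping", 1:"working"} ;
+//   (keys are INTEGER tokens, values are TEXT tokens; see integer_map below)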
+MachineLearningRoot machine_learning_root_body =
+    ACTIVITIES EQUALS integer_map.map machine_learning_root_body.b    {: return eph.setActivities(b, map); :}
+  |                                                                   {: return MachineLearningRoot.createDefault(); :}
+  ;
+
 StringList string_list =
      LB_SQUARE string_list_body.slb RB_SQUARE         {: return slb; :}
+  |  LB_SQUARE RB_SQUARE                              {: return new StringList(); :}
   ;
 
 StringList string_list_body =
@@ -206,11 +221,11 @@ StringList string_list_body =
        result.add(n);
        return result;
     :}
-  |                                                   {: return new StringList(); :}
   ;
 
 StringList round_string_list =
      LB_ROUND round_string_list_body.slb RB_ROUND     {: return slb; :}
+  |  LB_ROUND RB_ROUND                                {: return new StringList(); :}
   ;
 
 StringList round_string_list_body =
@@ -221,20 +236,34 @@ StringList round_string_list_body =
        result.add(n);
        return result;
     :}
-  |                                                   {: return new StringList(); :}
   ;
 
-StringMap string_map =
+StringKeyMap string_map =
      LB_CURLY string_map_body.smb RB_CURLY             {: return smb; :}
+  |  LB_CURLY RB_CURLY                                 {: return new StringKeyMap(); :}
   ;
 
-StringMap string_map_body =
+StringKeyMap string_map_body =
      TEXT.key COLON TEXT.value COMMA string_map_body.smb {: smb.put(key, value); return smb; :}
   |  TEXT.key COLON TEXT.value
     {:
-       StringMap result = new StringMap();
+       StringKeyMap result = new StringKeyMap();
        result.put(key, value);
        return result;
     :}
-  |                                                      {: return new StringMap(); :}
+  ;
+
+IntegerKeyMap integer_map =
+     LB_CURLY integer_map_body.imb RB_CURLY             {: return imb; :}
+  |  LB_CURLY RB_CURLY                                  {: return new IntegerKeyMap(); :}
+  ;
+
+IntegerKeyMap integer_map_body =
+     INTEGER.key COLON TEXT.value COMMA integer_map_body.imb {: imb.put(Integer.parseInt(key), value); return imb; :}
+  |  INTEGER.key COLON TEXT.value
+    {:
+       IntegerKeyMap result = new IntegerKeyMap();
+       result.put(Integer.parseInt(key), value);
+       return result;
+    :}
   ;
diff --git a/eraser-base/src/main/jastadd/main.relast b/eraser-base/src/main/jastadd/main.relast
index 425ede01d98a532f6ff8c43920c9e983735c6dc4..066ffbbc6f4eeb18930c645d246cb57112a2c8a9 100644
--- a/eraser-base/src/main/jastadd/main.relast
+++ b/eraser-base/src/main/jastadd/main.relast
@@ -1,135 +1,12 @@
 // ----------------    Main    ------------------------------
-Root ::= Thing* Group* ThingType* ChannelType* ChannelCategory* ItemCategory* User* MqttRoot InfluxRoot
-         MachineLearningRoot Rule* ;
-
-// ----------------    openHAB    ------------------------------
-abstract ModelElement ::= <ID:String> ;
-abstract LabelledModelElement : ModelElement ::= <Label:String> ;
-abstract DescribableModelElement : LabelledModelElement ::= <Description:String> ;
-
-ThingType : DescribableModelElement ::= Parameter* ;
-
-Thing : LabelledModelElement ::= Channel* ;
-
-ChannelType : DescribableModelElement ::= <ItemType:ItemType> <ReadOnly:boolean> ;
-
-abstract ChannelCategory ;
-DefaultChannelCategory : ChannelCategory ::= <Value:DefaultChannelCategoryValue> ;
-SimpleChannelCategory : ChannelCategory ::= <Value:String> ;
-
-Channel : ModelElement ::= Link* ;
-
-Link ::= <Item:Item> ;
-
-Parameter : DescribableModelElement ::= <Type:ParameterValueType> [DefaultValue:ParameterDefaultValue] <Context:String> <Required:boolean> ;
-ParameterDefaultValue ::= <Value:String> ;
-
-abstract Item : LabelledModelElement ::= <_fetched_data:boolean> MetaData:ItemMetaData* [ItemObserver] ;
-ItemMetaData ::= <Key:String> <Value:String> ;
-abstract ItemWithBooleanState : Item ::= <_state:boolean> ;
-abstract ItemWithStringState : Item ::= <_state:String> ;
-abstract ItemWithDoubleState : Item ::= <_state:double> ;
-ColorItem : Item ::= <_state:TupleHSB> ;
-DateTimeItem : Item ::= <_state:Date> ;
-ContactItem : ItemWithBooleanState ;
-DimmerItem : ItemWithDoubleState ;
-ImageItem : ItemWithStringState ;
-LocationItem : ItemWithStringState ;
-NumberItem : ItemWithDoubleState ;
-PlayerItem : ItemWithStringState ;
-RollerShutterItem : ItemWithBooleanState ;
-StringItem : ItemWithStringState ;
-SwitchItem : ItemWithBooleanState ;
-DefaultItem : ItemWithStringState ;
-
-ItemCategory ::= <Name:String> ;
-
-Group : LabelledModelElement ::= Group* Item* [AggregationFunction:GroupAggregationFunction] ;
-abstract GroupAggregationFunction ;
-SimpleGroupAggregationFunction : GroupAggregationFunction ::= <FunctionName:SimpleGroupAggregationFunctionName> ;
-ParameterizedGroupAggregationFunction : GroupAggregationFunction ::= <FunctionName:ParameterizedGroupAggregationFunctionName>
-                                                                     <Param1:String> <Param2:String> ;
+Root ::= OpenHAB2Model User* MqttRoot InfluxRoot MachineLearningRoot Rule* Location* ;
 
 // ----------------    Users   ------------------------------
 User : LabelledModelElement ;
+rel Root.CurrentUser? -> User ;
 
 // ----------------    Util    ------------------------------
 ExternalHost ::= <HostName:String> <Port:int> ;
 
-// ----------------    MQTT    ------------------------------
-MqttRoot ::= Topic:MqttTopic* <IncomingPrefix:String> <OutgoingPrefix:String> [Host:ExternalHost] ;
-MqttTopic ::= <Part:String> SubTopic:MqttTopic* ;
-
 // ----------------    InfluxDB    ------------------------------
 InfluxRoot ::= <User:String> <Password:String> <DbName:String> [Host:ExternalHost] ;
-
-// ----------------    Machine Learning Model    ------------------------------
-MachineLearningRoot ::= [ActivityRecognition:MachineLearningModel] [PreferenceLearning:MachineLearningModel] Activity* ChangeEvent* ;
-Activity ::= <Identifier:int> <Label:String> ;
-abstract ChangeEvent ::= <Identifier:int> <Timestamp:long> ChangedItem* ;
-ChangedItem ::= <NewStateAsString:String> ;
-RecognitionEvent : ChangeEvent ;
-ManualChangeEvent : ChangeEvent ;
-abstract MachineLearningModel ::= <OutputApplication:DoubleDoubleFunction> ;
-abstract ItemPreference ::= ;
-ItemPreferenceColor : ItemPreference ::= <PreferredHSB:TupleHSB> ;
-ItemPreferenceDouble : ItemPreference ::= <PreferredValue:double> ;
-
-// ----------------    Decision Tree    ------------------------------
-DecisionTreeRoot : MachineLearningModel ::= RootRule:DecisionTreeRule ;
-abstract DecisionTreeElement ::= Preference:ItemPreference*;
-abstract DecisionTreeRule : DecisionTreeElement ::= Left:DecisionTreeElement Right:DecisionTreeElement <Label:String> ;
-ItemStateCheckRule : DecisionTreeRule ::= ItemStateCheck ;
-abstract ItemStateCheck ::= <Comparator:ComparatorType> ;
-ItemStateNumberCheck : ItemStateCheck ::= <Value:double> ;
-ItemStateStringCheck : ItemStateCheck ::= <Value:String> ;
-DecisionTreeLeaf : DecisionTreeElement ::= <ActivityIdentifier:int> <Label:String> ;
-
-// ----------------    Neural Network    ------------------------------
-NeuralNetworkRoot : MachineLearningModel ::= InputNeuron* HiddenNeuron* OutputLayer ;
-OutputLayer ::= OutputNeuron* <Combinator:DoubleArrayDoubleFunction> ;
-abstract Neuron ::= Output:NeuronConnection* ;
-NeuronConnection ::= <Weight:double> ;
-InputNeuron : Neuron ;
-HiddenNeuron : Neuron ::= <ActivationFormula:DoubleArrayDoubleFunction> ;
-OutputNeuron : HiddenNeuron ::= <Label:String> ;
-
-DummyMachineLearningModel : MachineLearningModel ::= <Current:DecisionTreeLeaf> ;
-
-// --- New ECA rules ---
-Rule ::= Condition* Action* ;
-abstract Condition ;
-ItemStateCheckCondition : Condition ::= ItemStateCheck ;
-abstract Action ;
-LambdaAction : Action ::= <Lambda:Action2EditConsumer> ;
-TriggerRuleAction : Action ;
-abstract SetStateAction : Action ;
-SetStateFromConstantStringAction : SetStateAction ::= <NewState:String> ;
-SetStateFromLambdaAction : SetStateAction ::= <NewStateProvider:NewStateProvider> ;
-SetStateFromTriggeringItemAction : SetStateAction ::= ;
-SetStateFromItemsAction : SetStateAction ::= <Combinator:ItemsToStringFunction> ;
-AddDoubleToStateAction : SetStateAction ::= <Increment:double> ;
-MultiplyDoubleToStateAction : SetStateAction ::= <Multiplier:double> ;
-ItemObserver ::= ;
-
-// ----------------    Relations    ------------------------------
-rel ThingType.ChannelType* -> ChannelType ;
-rel Thing.Type -> ThingType ;
-rel Channel.Type -> ChannelType ;
-rel ChannelType.ChannelCategory -> ChannelCategory ;
-rel Item.Topic <-> MqttTopic.Item ;
-rel Item.Category? -> ItemCategory ;
-rel ItemStateCheck.Item -> Item ;
-rel NeuronConnection.Neuron <-> Neuron.Input* ;
-rel InputNeuron.Item -> Item ;
-rel OutputLayer.AffectedItem -> Item ;
-rel ItemPreference.Item -> Item ;
-rel Item.Controlling* <-> Item.ControlledBy* ;
-rel Root.CurrentUser? -> User ;
-rel ChangedItem.Item -> Item ;
-rel RecognitionEvent.Activity -> Activity ;
-
-rel TriggerRuleAction.Rule -> Rule ;
-rel SetStateAction.AffectedItem -> Item ;
-rel SetStateFromItemsAction.SourceItem* -> Item ;
-rel ItemObserver.TriggeredRule* <-> Rule.Observer* ;
diff --git a/eraser-base/src/main/jastadd/mqtt.jrag b/eraser-base/src/main/jastadd/mqtt.jrag
index ba20820fedd6cc368313145f443daf7961c3983e..65b36d151111c4a7a58fa7ae14f57a37accb1033 100644
--- a/eraser-base/src/main/jastadd/mqtt.jrag
+++ b/eraser-base/src/main/jastadd/mqtt.jrag
@@ -3,39 +3,25 @@ aspect MQTT {
   // --- default values ---
   private static final int MqttRoot.DEFAULT_PORT = 1883;
 
-  java.util.Set<String> MqttRoot.ignoredTopics = new java.util.HashSet<>();
-
   //--- resolveTopic ---
   syn java.util.Optional<MqttTopic> MqttRoot.resolveTopic(String topic) {
     ensureCorrectPrefixes();
     if (!topic.startsWith(getIncomingPrefix())) {
-      logger.debug("Topic '{}' does not start with incoming prefix '{}'", topic, getIncomingPrefix());
+      logger.warn("Topic '{}' does not start with incoming prefix '{}'", topic, getIncomingPrefix());
       return java.util.Optional.empty();
     }
-    topic = topic.substring(getIncomingPrefix().length());
-    String[] tokens = topic.split("/");
-    int tokenIndex = 0;
-    java.util.Optional<MqttTopic> result = check(tokens, 0, getTopics());
-    if (!result.isPresent() && !ignoredTopics.contains(topic)) {
-      logger.error("Could not resolve {}, ignoring it.", topic);
-      ignoredTopics.add(topic);
-    }
-    return result;
+    String suffix = topic.substring(getIncomingPrefix().length());
+    return resolveTopicSuffix(suffix);
   }
 
-  java.util.Optional<MqttTopic> MqttRoot.check(String[] tokens, int tokenIndex, JastAddList<MqttTopic> topics) {
-    for (MqttTopic current : topics) {
-      if (tokens[tokenIndex].equals(current.getPart())) {
-        // topic part matches, move on or return if tokens are empty
-        ++tokenIndex;
-        if (tokens.length == tokenIndex) {
-          return java.util.Optional.of(current);
-        } else {
-          return check(tokens, tokenIndex, current.getSubTopics());
-        }
+  //--- resolveTopicSuffix ---
+  syn java.util.Optional<MqttTopic> MqttRoot.resolveTopicSuffix(String suffix) {
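+    // Topics are now flat (see mqtt.relast): match the suffix against each topic's full TopicString.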
+    for (MqttTopic current : getTopics()) {
+      if (current.getTopicString().equals(suffix)) {
+        return java.util.Optional.of(current);
       }
     }
-    return java.util.Optional.empty();
+    return java.util.Optional.empty();
   }
 
   public void MqttRoot.ensureCorrectPrefixes() {
@@ -48,19 +34,10 @@ aspect MQTT {
   }
 
   //--- getIncomingTopic ---
-  syn String MqttTopic.getIncomingTopic() = getMqttRoot().getIncomingPrefix() + allParts();
+  syn String MqttTopic.getIncomingTopic() = getMqttRoot().getIncomingPrefix() + getTopicString();
 
   //--- getOutgoingTopic ---
-  syn String MqttTopic.getOutgoingTopic() = getMqttRoot().getOutgoingPrefix() + allParts();
-
-  //--- allParts ---
-  inh String MqttTopic.allParts();
-  eq MqttTopic.getSubTopic(int i).allParts() {
-    return allParts() + "/" + getSubTopic(i).getPart();
-  }
-  eq MqttRoot.getTopic(int i).allParts() {
-    return getTopic(i).getPart();
-  }
+  syn String MqttTopic.getOutgoingTopic() = getMqttRoot().getOutgoingPrefix() + getTopicString();
 
   //--- getMqttSender (should be cached) ---
   cache MqttRoot.getMqttSender();
@@ -78,7 +55,6 @@ aspect MQTT {
   inh MqttRoot MqttTopic.getMqttRoot();
 
   eq MqttRoot.getTopic().getMqttRoot() = this;
-  eq MqttTopic.getSubTopic().getMqttRoot() = getMqttRoot();
 
   /**
    * Sends the current state via MQTT.
diff --git a/eraser-base/src/main/jastadd/mqtt.relast b/eraser-base/src/main/jastadd/mqtt.relast
new file mode 100644
index 0000000000000000000000000000000000000000..cf477ff72e303415e47f42c939afe26cdfc4ed54
--- /dev/null
+++ b/eraser-base/src/main/jastadd/mqtt.relast
@@ -0,0 +1,4 @@
+// ----------------    MQTT    ------------------------------
+MqttRoot ::= Topic:MqttTopic* <IncomingPrefix:String> <OutgoingPrefix:String> [Host:ExternalHost] ;
+MqttTopic ::= <TopicString:String> ;
+rel Item.Topic? <-> MqttTopic.Item* ;
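+
+// Note: topics are flat now, e.g. a single MqttTopic with TopicString "livingroom/lamp1/state";
+// MqttRoot.resolveTopic strips the IncomingPrefix and matches the remaining suffix (see mqtt.jrag).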
diff --git a/eraser-base/src/main/jastadd/openhab.jrag b/eraser-base/src/main/jastadd/openhab.jrag
new file mode 100644
index 0000000000000000000000000000000000000000..6d30e4b3e0fb91225c9e8ea08bfdf3ab1afb1357
--- /dev/null
+++ b/eraser-base/src/main/jastadd/openhab.jrag
@@ -0,0 +1,5 @@
+aspect OpenHAB2 {
+  syn ActivityItem OpenHAB2Model.getActivityItem() {
+    return new ActivityItem();
+  }
+}
diff --git a/eraser-base/src/main/jastadd/openhab.relast b/eraser-base/src/main/jastadd/openhab.relast
new file mode 100644
index 0000000000000000000000000000000000000000..7d7d9afa27f9f9998f6c5bd4b9822f0dfa774e8d
--- /dev/null
+++ b/eraser-base/src/main/jastadd/openhab.relast
@@ -0,0 +1,57 @@
+// ----------------    openHAB    ------------------------------
+OpenHAB2Model ::= Thing* Group* ThingType* ChannelType* ChannelCategory* ItemCategory* /ActivityItem:Item/ ;
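+// /ActivityItem:Item/ is a nonterminal attribute (NTA); its value is computed on demand
+// by OpenHAB2Model.getActivityItem() (see openhab.jrag).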
+
+abstract ModelElement ::= <ID:String> ;
+abstract LabelledModelElement : ModelElement ::= <Label:String> ;
+abstract DescribableModelElement : LabelledModelElement ::= <Description:String> ;
+
+ThingType : DescribableModelElement ::= Parameter* ;
+rel ThingType.ChannelType* -> ChannelType ;
+
+Thing : LabelledModelElement ::= Channel* ;
+rel Thing.Type -> ThingType ;
+
+ChannelType : DescribableModelElement ::= <ItemType:ItemType> <ReadOnly:boolean> ;
+rel ChannelType.ChannelCategory -> ChannelCategory ;
+
+abstract ChannelCategory ;
+DefaultChannelCategory : ChannelCategory ::= <Value:DefaultChannelCategoryValue> ;
+SimpleChannelCategory : ChannelCategory ::= <Value:String> ;
+
+Channel : ModelElement ::= ;
+rel Channel.Type -> ChannelType ;
+rel Channel.LinkedItem* <-> Item.Channel? ;
+
+Parameter : DescribableModelElement ::= <Type:ParameterValueType> [DefaultValue:ParameterDefaultValue] <Context:String> <Required:boolean> ;
+ParameterDefaultValue ::= <Value:String> ;
+
+abstract Item : LabelledModelElement ::= <_fetched_data:boolean> MetaData:ItemMetaData* [ItemObserver] ;
+rel Item.Category? -> ItemCategory ;
+rel Item.Controlling* <-> Item.ControlledBy* ;
+
+abstract ItemWithBooleanState : Item ::= <_state:boolean> ;
+abstract ItemWithStringState : Item ::= <_state:String> ;
+abstract ItemWithDoubleState : Item ::= <_state:double> ;
+ColorItem : Item ::= <_state:TupleHSB> ;
+DateTimeItem : Item ::= <_state:Instant> ;
+ContactItem : ItemWithBooleanState ;
+DimmerItem : ItemWithDoubleState ;
+ImageItem : ItemWithStringState ;
+LocationItem : ItemWithStringState ;
+NumberItem : ItemWithDoubleState ;
+PlayerItem : ItemWithStringState ;
+RollerShutterItem : ItemWithBooleanState ;
+StringItem : ItemWithStringState ;
+SwitchItem : ItemWithBooleanState ;
+DefaultItem : ItemWithStringState ;
+ActivityItem : ItemWithDoubleState ;
+
+ItemMetaData ::= <Key:String> <Value:String> ;
+
+ItemCategory ::= <Name:String> ;
+
+Group : LabelledModelElement ::= Group* Item* [AggregationFunction:GroupAggregationFunction] ;
+abstract GroupAggregationFunction ;
+SimpleGroupAggregationFunction : GroupAggregationFunction ::= <FunctionName:SimpleGroupAggregationFunctionName> ;
+ParameterizedGroupAggregationFunction : GroupAggregationFunction ::= <FunctionName:ParameterizedGroupAggregationFunctionName>
+                                                                     <Param1:String> <Param2:String> ;
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/Main.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/Main.java
index 9b98f688fce2bba0dadffeb6584f03ff1f28628d..9b9aaba5b25e860177545415c5ca1c088a3e7165 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/Main.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/Main.java
@@ -6,8 +6,6 @@ import de.tudresden.inf.st.eraser.jastadd.model.Root;
 import de.tudresden.inf.st.eraser.openhab2.OpenHab2Importer;
 import de.tudresden.inf.st.eraser.openhab2.mqtt.MQTTUpdater;
 import de.tudresden.inf.st.eraser.util.ParserUtils;
-import org.apache.commons.math3.linear.MatrixUtils;
-import org.apache.commons.math3.linear.RealMatrix;
 import org.apache.logging.log4j.LogManager;
 
 import java.io.*;
@@ -16,7 +14,7 @@ import java.io.*;
  * Main entry point for testing eraser.
  * @author rschoene - Initial contribution
  */
-@SuppressWarnings({"unused", "WeakerAccess", "RedundantThrows"})
+@SuppressWarnings({"unused", "RedundantThrows"})
 public class Main {
 
   public static void main(String[] args) throws IOException, Parser.Exception {
@@ -25,35 +23,6 @@ public class Main {
 //    Root model = importFromOpenHab();
 //    testPrinterWith(model);
 //    testUpdaterWith(model);
-    testXY_to_RGB();
-  }
-
-  private static void testXY_to_RGB() {
-    /*
-    XYZ to RGB [M]-1
-     2.0413690 -0.5649464 -0.3446944
-    -0.9692660  1.8760108  0.0415560
-     0.0134474 -0.1183897  1.0154096
-     */
-    double[][] matrixData = { { 2.0413690, -0.5649464, -0.3446944},
-                              {-0.9692660,  1.8760108,  0.0415560},
-                              { 0.0134474, -0.1183897,  1.0154096}};
-    RealMatrix mInverted = MatrixUtils.createRealMatrix(matrixData);
-    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
-
-    while (true) {
-      try {
-        double x = readFromSystemIn(in, "x:");
-        double y = readFromSystemIn(in, "y:");
-        double z = 1;
-        RealMatrix xyz = MatrixUtils.createColumnRealMatrix(new double[] {x,y,z});
-        RealMatrix result = mInverted.multiply(xyz);
-        System.out.println(result);
-      } catch (IOException | NumberFormatException e) {
-        e.printStackTrace();
-        break;
-      }
-    }
   }
 
   private static double readFromSystemIn(BufferedReader in, String prompt) throws IOException {
@@ -70,7 +39,7 @@ public class Main {
 
   private static void testPrinterWith(Root model) {
     model.flushTreeCache();
-    System.out.println("Got model: " + model.description());
+    System.out.println("Got model: " + model.getOpenHAB2Model().description());
     System.out.println("PrettyPrinted:");
     System.out.println(model.prettyPrint());
   }
@@ -79,7 +48,7 @@ public class Main {
     Root model;
 //    model = importFromOpenHab();
     model = importFromFile();
-    System.out.println("Got model: " + model.description());
+    System.out.println("Got model: " + model.getOpenHAB2Model().description());
 //    JsonSerializer.write(model, "openhab2-data.json");
     testUpdaterWith(model);
   }
@@ -106,7 +75,7 @@ public class Main {
 
   public static Root importFromOpenHab() {
     OpenHab2Importer importer = new OpenHab2Importer();
-    return importer.importFrom("192.168.1.250", 8080);
+    return importer.importFrom("192.168.1.250", 8080).getRoot();
   }
 
   public static Root importFromFile() {
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/deserializer/ASTNodeDeserializer.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/deserializer/ASTNodeDeserializer.java
index 95f1d470c014ce4a123b5e01e33e7b049ba8ceff..d349331de9e8e43b7513001c659f94e61cc94ae8 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/deserializer/ASTNodeDeserializer.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/deserializer/ASTNodeDeserializer.java
@@ -48,12 +48,17 @@ public class ASTNodeDeserializer extends StdDeserializer<ASTNode> {
     r.put(c.getName(), ((node, model) -> f.apply(model, termValue(node, terminalName))));
   }
 
+  private void addResolverForOpenHAB2Model(Map<String, ResolveAstNodeForOpenHAB2Model> r, Class<?> c, BiFunction<OpenHAB2Model, String, Optional<? extends ASTNode>> f, String terminalName) {
+    r.put(c.getName(), ((node, model) -> f.apply(model, termValue(node, terminalName))));
+  }
+
   private Map<String, ResolveAstNode> resolvers = new HashMap<>();
+  private Map<String, ResolveAstNodeForOpenHAB2Model> resolversForOpenHAB2Model = new HashMap<>();
 
   private void initResolvers() {
-    addResolver(resolvers, ThingType.class, Root::resolveThingType, "ID");
-    addResolver(resolvers, ChannelType.class, Root::resolveChannelType, "ID");
-    addResolver(resolvers, Item.class, Root::resolveItem, "ID");
+    addResolverForOpenHAB2Model(resolversForOpenHAB2Model, ThingType.class, OpenHAB2Model::resolveThingType, "ID");
+    addResolverForOpenHAB2Model(resolversForOpenHAB2Model, ChannelType.class, OpenHAB2Model::resolveChannelType, "ID");
+    addResolverForOpenHAB2Model(resolversForOpenHAB2Model, Item.class, OpenHAB2Model::resolveItem, "ID");
     addResolver(resolvers, MqttTopic.class, Root::resolveMqttTopic, "IncomingTopic");
   }
 
@@ -363,11 +368,15 @@ public class ASTNodeDeserializer extends StdDeserializer<ASTNode> {
   }
 
   }
-
 interface ResolveAstNode {
   Optional<? extends ASTNode> resolve(JsonNode node, Root model) throws IOException;
 }
 
+
+interface ResolveAstNodeForOpenHAB2Model {
+  Optional<? extends ASTNode> resolve(JsonNode node, OpenHAB2Model model) throws IOException;
+}
+
 class ResolveLater {
   JsonNode node;
 
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/InternalMachineLearningHandler.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/InternalMachineLearningHandler.java
new file mode 100644
index 0000000000000000000000000000000000000000..2c59f46ca48fc18afa7aa096571a19ef9bef4b44
--- /dev/null
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/InternalMachineLearningHandler.java
@@ -0,0 +1,54 @@
+package de.tudresden.inf.st.eraser.jastadd.model;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.time.Instant;
+import java.util.List;
+
+/**
+ * Adapter for internally held machine learning models.
+ *
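+ * <p>Usage sketch (hypothetical; assumes a configured {@code InternalMachineLearningModel}):</p>
+ * <pre>
+ * MachineLearningDecoder decoder = new InternalMachineLearningHandler().setModel(internalModel);
+ * java.util.List&lt;ItemPreference&gt; preferences = decoder.classify().getPreferences();
+ * </pre>
+ *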
+ * @author rschoene - Initial contribution
+ */
+public class InternalMachineLearningHandler implements MachineLearningEncoder, MachineLearningDecoder {
+
+  private static final Logger logger = LogManager.getLogger(InternalMachineLearningHandler.class);
+  private InternalMachineLearningModel model;
+
+  public InternalMachineLearningHandler setModel(InternalMachineLearningModel model) {
+    this.model = model;
+    return this;
+  }
+
+  @Override
+  public void newData(Root model, List<Item> changedItems) {
+    logger.debug("Ignored new data of {}", changedItems);
+  }
+
+  @Override
+  public List<Item> getTargets() {
+    return model.getTargetItems();
+  }
+
+  @Override
+  public List<Item> getRelevantItems() {
+    return model.getRelevantItems();
+  }
+
+  @Override
+  public void triggerTraining() {
+    logger.debug("Ignored training trigger.");
+  }
+
+  @Override
+  public MachineLearningResult classify() {
+    List<ItemPreference> preferences = model.classify().computePreferences();
+    return new InternalMachineLearningResult(preferences);
+  }
+
+  @Override
+  public Instant lastModelUpdate() {
+    return null;
+  }
+}
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/InternalMachineLearningResult.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/InternalMachineLearningResult.java
new file mode 100644
index 0000000000000000000000000000000000000000..087020b383fee89ea839b2056e22072af799a9f3
--- /dev/null
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/InternalMachineLearningResult.java
@@ -0,0 +1,21 @@
+package de.tudresden.inf.st.eraser.jastadd.model;
+
+import java.util.List;
+
+/**
+ * Result of a classification returned by an internally held machine learning model.
+ *
+ * @author rschoene - Initial contribution
+ */
+public class InternalMachineLearningResult implements MachineLearningResult {
+  private final List<ItemPreference> preferences;
+
+  InternalMachineLearningResult(List<ItemPreference> preferences) {
+    this.preferences = preferences;
+  }
+
+  @Override
+  public List<ItemPreference> getPreferences() {
+    return this.preferences;
+  }
+}
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningDecoder.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningDecoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..a6e01fb945226bcf91e4bc94663c24b561f7cd6f
--- /dev/null
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningDecoder.java
@@ -0,0 +1,28 @@
+package de.tudresden.inf.st.eraser.jastadd.model;
+
+import java.time.Instant;
+
+/**
+ * This interface represents the connection from a machine learning model back to the knowledge base.
+ * It decodes the output of the machine learning model and provides the result of the classification.
+ *
+ * @author rschoene - Initial contribution
+ */
+@SuppressWarnings("unused")
+public interface MachineLearningDecoder {
+
+  /**
+   * Executes the machine learning model and returns the classification result.
+   * @return the result of the classification
+   */
+  MachineLearningResult classify();
+
+  // less important
+
+  /**
+   * Returns the time when the model was last updated, i.e., when the last training was completed.
+   * @return the time when the model was last updated, or <code>null</code> if the model was not trained yet
+   */
+  Instant lastModelUpdate();
+
+}
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningEncoder.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningEncoder.java
new file mode 100644
index 0000000000000000000000000000000000000000..8cf4d78fd92a2c18bd4914ab5b805db63995be85
--- /dev/null
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningEncoder.java
@@ -0,0 +1,48 @@
+package de.tudresden.inf.st.eraser.jastadd.model;
+
+import de.tudresden.inf.st.eraser.jastadd.model.Item;
+import de.tudresden.inf.st.eraser.jastadd.model.Root;
+
+import java.util.List;
+
+/**
+ * This interface represents the connection from the knowledge base to one machine learning model.
+ * It takes information from the knowledge base and encodes it into a representation suitable for
+ * both the technique used and the purpose of the machine learning model.
+ *
+ * @author rschoene - Initial contribution
+ */
+@SuppressWarnings("unused")
+public interface MachineLearningEncoder {
+
+  /**
+   * Update when new data is available.
+   * @param model        The underlying model
+   * @param changedItems A list of items whose state has changed
+   */
+  void newData(Root model, List<Item> changedItems);
+
+  // to be discussed: in which form this is specified
+
+  /**
+   * Get the items that this model is supposed to change.
+   * @return the list of targeted items
+   */
+  List<Item> getTargets();
+
+  // to be discussed: in which form this is specified
+
+  /**
+   * Get the items which are relevant for the decision making of this model.
+   * @return the list of items relevant for decision making
+   */
+  List<Item> getRelevantItems();
+
+  // to be discussed: whether this is necessary
+
+  /**
+   * Explicit hint for this model to start/trigger training. The model might ignore this hint.
+   */
+  void triggerTraining();
+
+}
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningResult.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningResult.java
new file mode 100644
index 0000000000000000000000000000000000000000..eebd16d3679c1531cb02d1ba1abd0c1610303668
--- /dev/null
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/MachineLearningResult.java
@@ -0,0 +1,25 @@
+package de.tudresden.inf.st.eraser.jastadd.model;
+
+import de.tudresden.inf.st.eraser.jastadd.model.ItemPreference;
+
+import java.util.List;
+
+/**
+ * Representation of a classification result produced by a MachineLearningModel.
+ *
+ * @author rschoene - Initial contribution
+ */
+@SuppressWarnings("unused")
+public interface MachineLearningResult {
+
+  // Object rawClass();
+
+  // double rawConfidence();
+
+  // can be used for both activity recognition and preference learning
+  /**
+   * Get the result as a list of item preferences, i.e., new states to be set for those items.
+   * @return the classification result as item preferences
+   */
+  List<ItemPreference> getPreferences();
+}
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/TupleHSB.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/TupleHSB.java
index a3520402e367ba88ab6f5f218915f3b0c5447f84..8e9ee2ad31f8d16ff415bcb7fe82bd34eb87f02a 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/TupleHSB.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/jastadd/model/TupleHSB.java
@@ -8,9 +8,9 @@ import java.util.Objects;
  * @author rschoene - Initial contribution
  */
 public class TupleHSB implements Cloneable {
-  public int hue;
-  public int saturation;
-  public int brightness;
+  private int hue;
+  private int saturation;
+  private int brightness;
   public static TupleHSB of(int hue, int saturation, int brightness) {
     TupleHSB result = new TupleHSB();
     result.hue = hue;
@@ -19,6 +19,30 @@ public class TupleHSB implements Cloneable {
     return result;
   }
 
+  public int getHue() {
+    return hue;
+  }
+
+  public int getSaturation() {
+    return saturation;
+  }
+
+  public int getBrightness() {
+    return brightness;
+  }
+
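+  // Copy-with helpers: a TupleHSB is treated as immutable, so each call returns a new tuple
+  // that differs from this one in exactly one component.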
+  public TupleHSB withDifferentHue(int hue) {
+    return TupleHSB.of(hue, this.saturation, this.brightness);
+  }
+
+  public TupleHSB withDifferentSaturation(int saturation) {
+    return TupleHSB.of(this.hue, saturation, this.brightness);
+  }
+
+  public TupleHSB withDifferentBrightness(int brightness) {
+    return TupleHSB.of(this.hue, this.saturation, brightness);
+  }
+
   public String toString() {
     return String.format("%s,%s,%s", hue, saturation, brightness);
   }
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java
index 627167f83a7773e2923dc31508904405042bc25b..4e1742c7f48b31e040cc70866e8ecd5070622172 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/OpenHab2Importer.java
@@ -42,7 +42,7 @@ public class OpenHab2Importer {
     nonDefaultChannelCategories = new HashSet<>();
   }
 
-  public Root importFrom(String host, int port) {
+  public OpenHAB2Model importFrom(String host, int port) {
     /*
     Plan:
     - requesting: thing-types, channel-types, things, items, links
@@ -58,7 +58,8 @@ public class OpenHab2Importer {
     mapper.registerModule(module);
 
     try {
-      Root model = Root.createEmptyRoot();
+      Root root = Root.createEmptyRoot();
+      OpenHAB2Model model = root.getOpenHAB2Model();
       ThingTypeData[] thingTypeList = mapper.readValue(makeURL(thingTypesUrl, hostAndPort), ThingTypeData[].class);
       logger.info("Read a total of {} thing type(s).", thingTypeList.length);
       update(model, thingTypeList);
@@ -84,17 +85,6 @@ public class OpenHab2Importer {
         ThingTypeData data = mapper.readValue(makeURL(thingTypeDetailUrl, hostAndPort, thingType.getID()), ThingTypeData.class);
         update(thingType, data);
       }
-
-      // create empty MQTT root
-      MqttRoot mqttRoot = new MqttRoot();
-      mqttRoot.setHostByName(host);
-      model.setMqttRoot(mqttRoot);
-
-      // create empty Influx root
-      InfluxRoot influxRoot = InfluxRoot.createDefault();
-      influxRoot.setHostByName(host);
-      model.setInfluxRoot(influxRoot);
-
       return model;
     } catch (IOException e) {
       logger.catching(e);
@@ -110,7 +100,7 @@ public class OpenHab2Importer {
     return URI.create(String.format(formatUrlString, hostAndPort, id)).toURL();
   }
 
-  private void update(Root model, ThingTypeData[] thingTypeList) {
+  private void update(OpenHAB2Model model, ThingTypeData[] thingTypeList) {
     for (ThingTypeData thingTypeData : thingTypeList) {
       ThingType thingType = new ThingType();
       thingType.setID(thingTypeData.UID);
@@ -120,7 +110,7 @@ public class OpenHab2Importer {
     }
   }
 
-  private void update(Root model, ChannelTypeData[] channelTypeList) {
+  private void update(OpenHAB2Model model, ChannelTypeData[] channelTypeList) {
     for (ChannelTypeData channelTypeData : channelTypeList) {
       ChannelType channelType = new ChannelType();
       channelType.setID(channelTypeData.UID);
@@ -173,7 +163,7 @@ public class OpenHab2Importer {
     }
   }
 
-  private void update(Root model, ThingData[] thingList) {
+  private void update(OpenHAB2Model model, ThingData[] thingList) {
     for (ThingData thingData : thingList) {
       Thing thing = new Thing();
       thing.setID(thingData.UID);
@@ -192,7 +182,7 @@ public class OpenHab2Importer {
     }
   }
 
-  private void update(Root model, AbstractItemData[] itemList) {
+  private void update(OpenHAB2Model model, AbstractItemData[] itemList) {
     List<Tuple<Group, GroupItemData>> groupsWithMembers = new ArrayList<>();
     List<Tuple<Group, GroupItemData>> groupsInGroups = new ArrayList<>();
     List<Tuple<Item, AbstractItemData>> itemsInGroups = new ArrayList<>();
@@ -309,12 +299,10 @@ public class OpenHab2Importer {
     }
   }
 
-  private void update(Root model, LinkData[] linkList) {
+  private void update(OpenHAB2Model model, LinkData[] linkList) {
     for (LinkData linkData : linkList) {
-      Link link = new Link();
       ifPresent(model.resolveChannel(linkData.channelUID), "Channel", linkData,
-          channel -> channel.addLink(link));
-      ifPresent(model.resolveItem(linkData.itemName), "Item", linkData, link::setItem);
+          channel -> ifPresent(model.resolveItem(linkData.itemName), "Item", linkData, channel::addLinkedItem));
     }
   }
 
@@ -353,7 +341,7 @@ public class OpenHab2Importer {
     }
   }
 
-  public Root importFrom(URL baseUrl) {
+  public OpenHAB2Model importFrom(URL baseUrl) {
     return importFrom(baseUrl.getHost(),
         baseUrl.getPort() == -1 ? baseUrl.getDefaultPort() : baseUrl.getPort());
   }
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/mqtt/MQTTUpdater.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/mqtt/MQTTUpdater.java
index 2bf5880087ccc171ea4e2799c0db4fa1bda11e9d..647114843839aef335b5337cb62e744870d3a0e3 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/mqtt/MQTTUpdater.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/openhab2/mqtt/MQTTUpdater.java
@@ -1,23 +1,15 @@
 package de.tudresden.inf.st.eraser.openhab2.mqtt;
 
+import de.tudresden.inf.st.eraser.jastadd.model.ExternalHost;
 import de.tudresden.inf.st.eraser.jastadd.model.Item;
 import de.tudresden.inf.st.eraser.jastadd.model.Root;
 import de.tudresden.inf.st.eraser.util.MqttReceiver;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.fusesource.hawtbuf.Buffer;
-import org.fusesource.hawtbuf.UTF8Buffer;
-import org.fusesource.mqtt.client.*;
+import org.fusesource.mqtt.client.QoS;
 
 import java.io.IOException;
-import java.net.URI;
-import java.util.Arrays;
-import java.util.Objects;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
 
 /**
  * Update an imported model by subscribing to MQTT topics.
@@ -36,28 +28,33 @@ public class MQTTUpdater implements AutoCloseable {
     this.delegatee = new MqttReceiver();
   }
 
-  public MQTTUpdater(Root model) throws IllegalArgumentException {
+  public MQTTUpdater(Root root) throws IllegalArgumentException {
     this();
-    this.setModel(model);
+    this.setRoot(root);
   }
 
   /**
-   * Sets the model to update
-   * @param model the model to update
+   * Sets the model root to update
+   * @param root the model root to update
    */
-  public void setModel(Root model) {
-    delegatee.setHost(model.getMqttRoot().getHost().getHostName());
+  public void setRoot(Root root) {
+    ExternalHost host = root.getMqttRoot().getHost();
+    delegatee.setHost(host.getHostName(), host.getPort());
     delegatee.setOnMessage((topicString, message) ->
-        model.getMqttRoot().resolveTopic(topicString).ifPresent(topic ->
-            itemUpdate(topic.getItem(), message)));
-    delegatee.setTopicsForSubscription(model.getMqttRoot().getIncomingPrefix() + "#");
+        root.getMqttRoot().resolveTopic(topicString).ifPresent(topic ->
+            topic.getItems().forEach(
+                item -> itemUpdate(item, message))));
+    delegatee.setTopicsForSubscription(root.getMqttRoot().getIncomingPrefix() + "#");
     delegatee.setQoSForSubscription(QoS.AT_LEAST_ONCE);
   }
 
   private void itemUpdate(Item item, String state) {
-    this.logger.debug("Update state of {} [{}] from '{}' to '{}'.",
-        item.getLabel(), item.getID(), item.getStateAsString(), state);
-    item.setStateFromString(state, false);
+    String oldState = item.getStateAsString();
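+    // Only propagate the update if the state actually changed, to avoid redundant writes and log noise.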
+    if (oldState == null || !oldState.equals(state)) {
+      this.logger.debug("Update state of {} [{}] from '{}' to '{}'.",
+          item.getLabel(), item.getID(), oldState, state);
+      item.setStateFromString(state, false);
+    }
   }
 
   /**
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java
index 0d1d7334b5ef642cc94a02a47aeb475b44ed13b1..400a09affa32b8990ccc82396d795b2dd6ce8cb4 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/parser/EraserParserHelper.java
@@ -1,6 +1,7 @@
 package de.tudresden.inf.st.eraser.parser;
 
 import de.tudresden.inf.st.eraser.jastadd.model.*;
+import de.tudresden.inf.st.eraser.util.JavaUtils;
 import de.tudresden.inf.st.eraser.util.ParserUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -72,7 +73,7 @@ public class EraserParserHelper {
     this.root.getMqttRoot().ensureCorrectPrefixes();
 
     resolveList(channelMap, missingChannelListMap, Thing::addChannel);
-    resolveList(itemMap, missingItemLinkListMap, (channel, item) -> channel.addLink(new Link(item)));
+    resolveList(itemMap, missingItemLinkListMap, Channel::addLinkedItem);
     resolveList(groupMap, missingSubGroupListMap, Group::addGroup);
     resolveList(itemMap, missingItemListMap, this::addItemToGroup);
     resolveList(channelTypeMap, missingChannelTypeListMap, ThingType::addChannelType);
@@ -114,13 +115,13 @@ public class EraserParserHelper {
           sortedDanglingItems.add(item);
         }
       }
-      ParserUtils.createUnknownGroup(this.root, sortedDanglingItems);
+      ParserUtils.createUnknownGroup(this.root.getOpenHAB2Model(), sortedDanglingItems);
     }
   }
 
   private void createChannelCategories() {
     channelCategoryMap.values().stream().sorted(Comparator.comparing(this::ident)).forEach(
-        cc -> root.addChannelCategory(cc));
+        cc -> root.getOpenHAB2Model().addChannelCategory(cc));
     channelCategoryMap.clear();
   }
 
@@ -128,7 +129,7 @@ public class EraserParserHelper {
     Map<String, ItemCategory> newCategories = new HashMap<>();
     missingItemCategoryMap.forEach((item, category) ->
         item.setCategory(newCategories.computeIfAbsent(category, ItemCategory::new)));
-    newCategories.values().forEach(root::addItemCategory);
+    newCategories.values().forEach(node -> root.getOpenHAB2Model().addItemCategory(node));
   }
 
   private void checkUnusedElements() {
@@ -139,7 +140,7 @@ public class EraserParserHelper {
     if (elem instanceof ModelElement) {
       return ((ModelElement) elem).getID();
     } else if (elem instanceof MqttTopic) {
-      return safeAllParts((MqttTopic) elem);
+      return ((MqttTopic) elem).getTopicString();
     } else if (elem instanceof DefaultChannelCategory) {
       return ((DefaultChannelCategory) elem).getValue().name();
     } else if (elem instanceof SimpleChannelCategory) {
@@ -148,85 +149,73 @@ public class EraserParserHelper {
     return elem.toString();
   }
 
-  private String safeAllParts(MqttTopic elem) {
-    StringBuilder sb = new StringBuilder(elem.getPart());
-    ASTNode parent;
-    while (true) {
-      parent = elem.getParent();
-      if (parent == null) break;
-      assert parent instanceof List;
-      parent = parent.getParent();
-      if (parent == null || parent instanceof MqttRoot) {
-        break;
-      }
-      elem = (MqttTopic) parent;
-      sb.insert(0, elem.getPart() + "/");
-    }
-    return sb.toString();
+  private <Src extends ASTNode, Target extends ASTNode> void resolveList(
+      Map<String, Target> resolved, Map<Src, Iterable<String>> missing, BiConsumer<Src, Target> adder) {
+    missing.forEach(
+        (elem, keyList) -> keyList.forEach(
+            key -> resolve0(resolved, key, elem, adder)));
+    missing.clear();
   }
 
-  private <Src extends ASTNode, Target extends ASTNode> void resolveList(Map<String, Target> resolved, Map<Src, Iterable<String>> missing, BiConsumer<Src, Target> adder) {
-    missing.forEach((elem, keyList) -> keyList.forEach(key -> {
-      Target value = resolved.get(key);
-      if (value == null) {
-        logger.warn("Reference in {} {} for '{}' cannot be resolved",
-            elem.getClass().getSimpleName(), ident(elem), key);
-        return;
-      }
-      if (checkUnusedElements) {
-        unusedElements.remove(value);
-      }
-      adder.accept(elem, value);
-    }));
+  private <Src extends ASTNode, Target extends ASTNode> void resolve(
+      Map<String, Target> resolved, Map<Src, String> missing, BiConsumer<Src, Target> setter) {
+    missing.forEach(
+        (elem, key) -> resolve0(resolved, key, elem, setter));
     missing.clear();
   }
 
-  private <Src extends ASTNode, Target extends ASTNode> void resolve(Map<String, Target> resolved, Map<Src, String> missing, BiConsumer<Src, Target> setter) {
-    missing.forEach((elem, key) -> {
-      Target value = resolved.get(key);
-      if (value == null) {
-        logger.warn("Reference in {} {} for '{}' cannot be resolved",
-            elem.getClass().getSimpleName(), ident(elem), key);
-        return;
-      }
-      if (checkUnusedElements) {
-        unusedElements.remove(value);
-      }
-      setter.accept(elem, value);
-    });
-    missing.clear();
+  private <Src extends ASTNode, Target extends ASTNode> void resolve0(
+      Map<String, Target> resolved, String key, Src elem, BiConsumer<Src, Target> action) {
+    Target value = resolved.get(key);
+    if (value == null) {
+      logger.warn("Reference in {} {} for '{}' cannot be resolved",
+          elem.getClass().getSimpleName(), ident(elem), key);
+      return;
+    }
+    if (checkUnusedElements) {
+      unusedElements.remove(value);
+    }
+    action.accept(elem, value);
   }
 
+  //--- Thing and ThingType ---
+
   public Thing addThingType(Thing t, String typeName) {
     missingThingTypeMap.put(t, typeName);
     return t;
   }
 
-  public ChannelType setItemType(ChannelType ct, String itemTypeName) {
-    ct.setItemType(ItemType.valueOf(itemTypeName));
-    return ct;
+  public ThingType setChannelTypes(ThingType tt, StringList channelTypeNames) {
+    missingChannelTypeListMap.put(tt, channelTypeNames);
+    return tt;
   }
 
-  public Item setCategory(Item item, String categoryName) {
-    missingItemCategoryMap.put(item, categoryName);
-    return item;
+  public ThingType setParameters(ThingType tt, StringList parameterNames) {
+    missingParameterListMap.put(tt, parameterNames);
+    return tt;
   }
 
-  public Item setTopic(Item item, String mqttTopicName) {
-    missingTopicMap.put(item, mqttTopicName);
-    return item;
+  public Thing setChannels(Thing t, StringList channelNames) {
+    missingChannelListMap.put(t, channelNames);
+    return t;
   }
 
-  public Item setControlling(Item item, StringList controlling) {
-    missingControllingListMap.put(item, controlling);
-    return item;
+  public Thing setID(Thing thing, String id) {
+    thing.setID(id);
+    return thing;
   }
 
-  public Item setMetaData(Item item, StringMap metaData) {
-    for (AbstractMap.SimpleEntry<String, String> entry : metaData) {
-      item.addMetaData(new ItemMetaData(entry.getKey(), entry.getValue()));
-    }
-    return item;
+  public ThingType setID(ThingType thingType, String id) {
+    thingType.setID(id);
+    thingTypeMap.put(id, thingType);
+    return thingType;
+  }
+
+  //--- Channel and ChannelType ---
+
+  public ChannelType setItemType(ChannelType ct, String itemTypeName) {
+    ct.setItemType(ItemType.valueOf(itemTypeName));
+    return ct;
   }
 
   public ChannelType setChannelCategory(ChannelType ct, String name) {
@@ -244,6 +233,95 @@ public class EraserParserHelper {
     return ct;
   }
 
+  public Channel setChannelType(Channel c, String channelTypeName) {
+    missingChannelTypeMap.put(c, channelTypeName);
+    return c;
+  }
+
+  public Channel setLinks(Channel c, StringList linkNames) {
+    missingItemLinkListMap.put(c, linkNames);
+    return c;
+  }
+
+  public ChannelType setID(ChannelType channelType, String id) {
+    channelType.setID(id);
+    channelTypeMap.put(id, channelType);
+    return channelType;
+  }
+
+  public Channel setID(Channel channel, String id) {
+    channel.setID(id);
+    channelMap.put(id, channel);
+    return channel;
+  }
+
+  //--- Item ---
+
+  public Item createItem() {
+    ItemPrototype result = new ItemPrototype();
+    result.disableSendState();
+    return result;
+  }
+
+  public Item setCategory(Item item, String categoryName) {
+    missingItemCategoryMap.put(item, categoryName);
+    return item;
+  }
+
+  public Item setMetaData(Item item, StringKeyMap metaData) {
+    for (AbstractMap.SimpleEntry<String, String> entry : metaData) {
+      item.addMetaData(new ItemMetaData(entry.getKey(), entry.getValue()));
+    }
+    return item;
+  }
+
+  public Item setControlling(Item item, StringList controlling) {
+    missingControllingListMap.put(item, controlling);
+    return item;
+  }
+
+  public Item retype(Item itemWithCorrectType, Item prototype) {
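+    // Copy everything parsed into the generic prototype onto the correctly typed item,
+    // then re-register the typed item under the prototype's ID.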
+    itemWithCorrectType.setID(prototype.getID());
+    itemWithCorrectType.setLabel(prototype.getLabel());
+    itemWithCorrectType.setMetaDataList(prototype.getMetaDataList());
+    if (!(itemWithCorrectType instanceof ActivityItem)) {
+      String state = prototype.getStateAsString();
+      itemWithCorrectType.disableSendState();
+      if (state.isEmpty()) {
+        itemWithCorrectType.setStateToDefault();
+      } else {
+        itemWithCorrectType.setStateFromString(state);
+      }
+      itemWithCorrectType.enableSendState();
+    }
+
+    moveMissingForRetype(itemWithCorrectType, prototype, missingTopicMap);
+    moveMissingForRetype(itemWithCorrectType, prototype, missingControllingListMap);
+    moveMissingForRetype(itemWithCorrectType, prototype, missingItemCategoryMap);
+
+    itemMap.put(prototype.getID(), itemWithCorrectType);
+
+    itemOrder.add(itemWithCorrectType);
+
+    return itemWithCorrectType;
+  }
+
+  private <T> void moveMissingForRetype(Item itemWithCorrectType, Item prototype, Map<Item, T> missingXMap) {
+    T value = missingXMap.get(prototype);
+    if (value != null) {
+      missingXMap.put(itemWithCorrectType, value);
+    }
+    missingXMap.remove(prototype);
+  }
+
+  public Item setID(Item item, String id) {
+    item.setID(id);
+    itemMap.put(id, item);
+    return item;
+  }
+
+  //--- Group ---
+
   public Group setSubGroups(Group g, StringList subGroupNames) {
     missingSubGroupListMap.put(g, subGroupNames);
     return g;
@@ -278,77 +356,77 @@ public class EraserParserHelper {
     return g;
   }
 
-  public ThingType setChannelTypes(ThingType tt, StringList channelTypeNames) {
-    missingChannelTypeListMap.put(tt, channelTypeNames);
-    return tt;
+  public Group setID(Group group, String id) {
+    group.setID(id);
+    groupMap.put(id, group);
+    return group;
   }
 
+  //--- Parameter ---
+
   public Parameter setParameterValueType(Parameter p, String pvt) {
-    p.setType(ParameterValueType.valueOf(toTitleCase(pvt)));
+    p.setType(ParameterValueType.valueOf(JavaUtils.toTitleCase(pvt)));
     return p;
   }
 
-  private String toTitleCase(String s) {
-    if (s == null || s.isEmpty()) {
-      return s;
-    }
-    return s.substring(0, 1).toUpperCase() + s.substring(1);
-  }
-
   public Parameter setDefault(Parameter p, String defaultValue) {
     p.setDefaultValue(new ParameterDefaultValue(defaultValue));
     return p;
   }
 
-  public ThingType setParameters(ThingType tt, StringList parameterNames) {
-    missingParameterListMap.put(tt, parameterNames);
-    return tt;
+  public Parameter setID(Parameter parameter, String id) {
+    parameter.setID(id);
+    parameterMap.put(id, parameter);
+    return parameter;
   }
 
-  public Thing setChannels(Thing t, StringList channelNames) {
-    missingChannelListMap.put(t, channelNames);
-    return t;
-  }
+  //--- MQTT ---
 
-  public Channel setChannelType(Channel c, String channelTypeName) {
-    missingChannelTypeMap.put(c, channelTypeName);
-    return c;
+  public Item setTopic(Item item, String mqttTopicName) {
+    missingTopicMap.put(item, mqttTopicName);
+    return item;
   }
 
-  public Channel setLinks(Channel c, StringList linkNames) {
-    missingItemLinkListMap.put(c, linkNames);
-    return c;
+  //--- Activity ---
+
+  public MachineLearningRoot setActivities(MachineLearningRoot mlr, IntegerKeyMap map) {
+    for (AbstractMap.SimpleEntry<Integer, String> entry : map) {
+      Activity activity = new Activity();
+      activity.setIdentifier(entry.getKey());
+      activity.setLabel(entry.getValue());
+      mlr.addActivity(activity);
+    }
+    return mlr;
   }
 
+  //--- Root ---
+
   public Root createRoot() {
-    this.root = new Root();
-    this.root.setMqttRoot(new MqttRoot());
-    this.root.setInfluxRoot(InfluxRoot.createDefault());
-    this.root.setMachineLearningRoot(new MachineLearningRoot());
+    this.root = Root.createEmptyRoot();
     return this.root;
   }
 
   public Root createRoot(Thing t) {
     Root result = createRoot();
-    result.addThing(t);
+    result.getOpenHAB2Model().addThing(t);
     return result;
   }
 
   public Root createRoot(Group g) {
     Root result = createRoot();
-    result.addGroup(g);
+    result.getOpenHAB2Model().addGroup(g);
     return result;
   }
 
   public Root createRoot(ThingType tt) {
     Root result = createRoot();
-    result.addThingType(tt);
+    result.getOpenHAB2Model().addThingType(tt);
     return result;
   }
 
   public Root createRoot(ChannelType ct) {
     Root result = createRoot();
-    result.addChannelType(ct);
+    result.getOpenHAB2Model().addChannelType(ct);
     return result;
   }
 
@@ -364,83 +442,10 @@ public class EraserParserHelper {
     return result;
   }
 
-  public Item createItem() {
-    ItemPrototype result = new ItemPrototype();
-    result.disableSendState();
+  public Root createRoot(MachineLearningRoot ml) {
+    Root result = createRoot();
+    result.setMachineLearningRoot(ml);
     return result;
   }
 
-  public Item retype(Item itemWithCorrectType, Item prototype) {
-    itemWithCorrectType.setID(prototype.getID());
-    itemWithCorrectType.setLabel(prototype.getLabel());
-    itemWithCorrectType.setMetaDataList(prototype.getMetaDataList());
-    String state = prototype.getStateAsString();
-    itemWithCorrectType.disableSendState();
-    if (state.isEmpty()) {
-      itemWithCorrectType.setStateToDefault();
-    } else {
-      itemWithCorrectType.setStateFromString(state);
-    }
-    itemWithCorrectType.enableSendState();
-
-    moveMissingForRetype(itemWithCorrectType, prototype, missingTopicMap);
-    moveMissingForRetype(itemWithCorrectType, prototype, missingControllingListMap);
-    moveMissingForRetype(itemWithCorrectType, prototype, missingItemCategoryMap);
-
-    itemMap.put(prototype.getID(), itemWithCorrectType);
-
-    itemOrder.add(itemWithCorrectType);
-
-    return itemWithCorrectType;
-  }
-
-  private <T> void moveMissingForRetype(Item itemWithCorrectType, Item prototype, Map<Item, T> missingXMap) {
-    T value = missingXMap.get(prototype);
-    if (value != null) {
-      missingXMap.put(itemWithCorrectType, value);
-    }
-    missingXMap.remove(prototype);
-  }
-
-  public Thing setID(Thing thing, String id) {
-    thing.setID(id);
-    return thing;
-  }
-
-  public Item setID(Item item, String id) {
-    item.setID(id);
-    itemMap.put(id, item);
-    return item;
-  }
-
-  public Group setID(Group group, String id) {
-    group.setID(id);
-    groupMap.put(id, group);
-    return group;
-  }
-
-  public ThingType setID(ThingType thingType, String id) {
-    thingType.setID(id);
-    thingTypeMap.put(id, thingType);
-    return thingType;
-  }
-
-  public Parameter setID(Parameter parameter, String id) {
-    parameter.setID(id);
-    parameterMap.put(id, parameter);
-    return parameter;
-  }
-
-  public ChannelType setID(ChannelType channelType, String id) {
-    channelType.setID(id);
-    channelTypeMap.put(id, channelType);
-    return channelType;
-  }
-
-  public Channel setID(Channel channel, String id) {
-    channel.setID(id);
-    channelMap.put(id, channel);
-    return channel;
-  }
-
 }
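The reworked EraserParserHelper keeps deferring cross-references through its various missing*Map fields and only resolves them once the referenced element is known, warning (as in the snippet at the top of this hunk) when a key cannot be found. A minimal, self-contained sketch of that deferred-resolution pattern, with hypothetical names, might look like this:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

/** Sketch only: deferred reference resolution as used by the parser helper (names are hypothetical). */
class DeferredResolver<E, V> {
  private final Map<E, String> missing = new HashMap<>();  // element -> unresolved key
  private final Map<String, V> known = new HashMap<>();    // key -> resolved value

  void defer(E element, String key) { missing.put(element, key); }
  void register(String key, V value) { known.put(key, value); }

  /** Applies 'action' for every resolvable reference and warns about the rest. */
  void resolveAll(BiConsumer<E, V> action) {
    missing.forEach((element, key) -> {
      V value = known.get(key);
      if (value == null) {
        System.err.printf("Reference in %s for '%s' cannot be resolved%n", element, key);
        return;  // skip this element, keep resolving the others
      }
      action.accept(element, value);
    });
  }
}
```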
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/JavaUtils.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/JavaUtils.java
index 5a66f6a1d321480e3356c37c72f90998ad888d77..79e02fb74f53357b69b464e472801afbf311c3fd 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/JavaUtils.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/JavaUtils.java
@@ -45,4 +45,11 @@ public class JavaUtils {
     return StreamSupport.stream(jastAddList.spliterator(), false);
   }
 
+  public static String toTitleCase(String s) {
+    if (s == null || s.isEmpty()) {
+      return s;
+    }
+    return s.substring(0, 1).toUpperCase() + s.substring(1);
+  }
+
 }
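The toTitleCase helper moved into JavaUtils is what setParameterValueType now uses to turn lower-case parser tokens into enum constant names. A quick, illustrative check of its behaviour (the input values are made up):

```java
import de.tudresden.inf.st.eraser.util.JavaUtils;

public class ToTitleCaseDemo {
  public static void main(String[] args) {
    System.out.println(JavaUtils.toTitleCase("integer"));  // "Integer"
    System.out.println(JavaUtils.toTitleCase(""));         // "" is returned unchanged
    System.out.println(JavaUtils.toTitleCase(null));       // null is returned unchanged
  }
}
```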
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MemberPrinter.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MemberPrinter.java
index 7f9bb0e29dfa838242d6ad4b15ee1f83f5481366..a6c89a42bca8e978f837ad805ec4278634f3afa4 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MemberPrinter.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MemberPrinter.java
@@ -17,7 +17,9 @@ import java.util.function.Supplier;
 public class MemberPrinter {
 
   private static final Logger logger = LogManager.getLogger(MemberPrinter.class);
-  private boolean addSemicolonNewlineWhenBuild;
+  private static boolean addSemicolonNewlineWhenBuild = true;
+  private static boolean emitNothingOnEmptyBody = true;
+  private boolean empty = true;
   private final StringBuilder sb;
 
   public enum ListBracketType {
@@ -35,14 +37,28 @@ public class MemberPrinter {
   }
 
   public MemberPrinter(String elementName) {
-    this.addSemicolonNewlineWhenBuild = true;
     this.sb = new StringBuilder();
     if (elementName != null && !elementName.isEmpty()) {
       sb.append(elementName).append(":");
     }
   }
 
+  public static void setAddSemicolonNewlineWhenBuild(boolean addSemicolonNewlineWhenBuild) {
+    MemberPrinter.addSemicolonNewlineWhenBuild = addSemicolonNewlineWhenBuild;
+  }
+
+  public static void setEmitNothingOnEmptyBody(boolean emitNothingOnEmptyBody) {
+    MemberPrinter.emitNothingOnEmptyBody = emitNothingOnEmptyBody;
+  }
+
   public String build() {
+    if (empty) {
+      if (emitNothingOnEmptyBody) {
+        return "";
+      } else {
+        logger.debug("Emitting empty body {}", sb);
+      }
+    }
     if (addSemicolonNewlineWhenBuild) {
       sb.append(" ;\n");
     }
@@ -117,6 +133,7 @@ public class MemberPrinter {
       sb.append(' ').append(name).append("=[");
       concatIds(listOfElements);
       sb.append("]");
+      this.empty = false;
     }
     return this;
   }
@@ -136,6 +153,7 @@ public class MemberPrinter {
       sb.append(' ').append(name).append("=[");
       concatIds(listOfNodes, mapping);
       sb.append("]");
+      this.empty = false;
     }
     return this;
   }
@@ -171,6 +189,7 @@ public class MemberPrinter {
       sb.append(' ').append(name).append("=").append(bracketType.begin);
       concatNodes(listOfNodes, mapping, false);
       sb.append(bracketType.end);
+      this.empty = false;
     }
     return this;
   }
@@ -197,6 +216,7 @@ public class MemberPrinter {
    */
   public MemberPrinter addOptionalPrettyPrint(ASTNode child) {
     if (child != null) {
+      this.empty = false;
       sb.append(child.prettyPrint());
     }
     return this;
@@ -210,6 +230,7 @@ public class MemberPrinter {
    */
   public MemberPrinter addFlag(String name, boolean isSet) {
     if (isSet) {
+      this.empty = false;
       sb.append(' ').append(name);
     }
     return this;
@@ -234,6 +255,7 @@ public class MemberPrinter {
    */
   public MemberPrinter addNonDefault(String name, Object actualValue, Object defaultValue) {
     if (!actualValue.equals(defaultValue)) {
+      this.empty = false;
       return add(name, actualValue);
     }
     return this;
@@ -275,6 +297,7 @@ public class MemberPrinter {
   }
 
   private MemberPrinter add(String name, Object actualValue) {
+    this.empty = false;
     sb.append(' ').append(name).append("=\"").append(actualValue).append("\"");
     return this;
   }
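With the new empty flag, a MemberPrinter that never received any non-default content now builds to an empty string by default, and setEmitNothingOnEmptyBody(false) restores the old output (plus a debug log entry). A hedged usage sketch, assuming the remainder of build() still returns sb.toString() as before:

```java
import de.tudresden.inf.st.eraser.util.MemberPrinter;

public class MemberPrinterDemo {
  public static void main(String[] args) {
    MemberPrinter mp = new MemberPrinter("Parameter");
    mp.addFlag("required", false);              // flag not set, so the printer stays empty
    System.out.println(mp.build().isEmpty());   // true: empty bodies now emit nothing by default

    MemberPrinter.setEmitNothingOnEmptyBody(false);
    System.out.println(new MemberPrinter("Parameter").build());  // "Parameter: ;\n" plus a debug log entry
  }
}
```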
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MqttReceiver.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MqttReceiver.java
index e433bf34d3b2e4848291a766ca3b5e1c55a80dc3..1f013ea25872e57e452a603616e95126bdd69e11 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MqttReceiver.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/MqttReceiver.java
@@ -1,6 +1,5 @@
 package de.tudresden.inf.st.eraser.util;
 
-import de.tudresden.inf.st.eraser.openhab2.mqtt.MQTTUpdater;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.fusesource.hawtbuf.Buffer;
@@ -40,7 +39,7 @@ public class MqttReceiver implements AutoCloseable {
   private QoS qos;
 
   public MqttReceiver() {
-    this.logger = LogManager.getLogger(MQTTUpdater.class);
+    this.logger = LogManager.getLogger(MqttReceiver.class);
     this.readyLock = new ReentrantLock();
     this.readyCondition = readyLock.newCondition();
     this.ready = false;
@@ -50,8 +49,8 @@ public class MqttReceiver implements AutoCloseable {
   /**
    * Sets the host to receive messages from
    */
-  public void setHost(String host) {
-    this.host = URI.create("tcp://" + host + ":1883");
+  public void setHost(String host, int port) {
+    this.host = URI.create("tcp://" + host + ":" + port);
     logger.debug("Host is {}", this.host);
   }
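setHost now takes the port explicitly, which is what lets the MQTT tests below point the receiver at a broker whose port is only known once the test container has started. A hedged fragment (the mqttBroker container and the topic name are placeholders, and exception handling is omitted):

```java
// Fragment only: 'mqttBroker' stands for an already started broker container.
try (MqttReceiver receiver = new MqttReceiver()) {
  receiver.setHost(mqttBroker.getContainerIpAddress(), mqttBroker.getFirstMappedPort());
  receiver.setTopicsForSubscription("oh/out/item1");
  receiver.setOnMessage((topic, message) -> System.out.println(topic + " -> " + message));
  receiver.waitUntilReady(2, TimeUnit.SECONDS);
}
```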
 
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/ParserUtils.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/ParserUtils.java
index 4147e65d032584a27e77c019f7e1f2172b6aa807..5eca1e63816e64d4c5811abcde3861b3b00dee14 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/ParserUtils.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/ParserUtils.java
@@ -3,10 +3,7 @@ package de.tudresden.inf.st.eraser.util;
 import beaver.Parser;
 import beaver.Scanner;
 import beaver.Symbol;
-import de.tudresden.inf.st.eraser.jastadd.model.Group;
-import de.tudresden.inf.st.eraser.jastadd.model.Item;
-import de.tudresden.inf.st.eraser.jastadd.model.MqttTopic;
-import de.tudresden.inf.st.eraser.jastadd.model.Root;
+import de.tudresden.inf.st.eraser.jastadd.model.*;
 import de.tudresden.inf.st.eraser.jastadd.parser.EraserParser;
 import de.tudresden.inf.st.eraser.jastadd.scanner.EraserScanner;
 import org.apache.logging.log4j.LogManager;
@@ -16,8 +13,8 @@ import java.io.*;
 import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.util.*;
-import java.util.stream.Collectors;
+import java.util.Collection;
+import java.util.Objects;
 
 /**
  * Utility methods involving scanner and parser of the models.
@@ -29,6 +26,38 @@ public class ParserUtils {
   private static boolean verboseLoading = false;
   private static final Logger logger = LogManager.getLogger(ParserUtils.class);
 
+  private interface ReaderProvider {
+    Reader provide() throws IOException;
+  }
+
+  private static class ReaderProviderByName implements ReaderProvider {
+    private final String filename;
+    private final Class<?> clazz;
+
+    ReaderProviderByName(String filename, Class<?> clazz) {
+      this.filename = filename;
+      this.clazz = clazz;
+    }
+
+    @Override
+    public Reader provide() throws IOException {
+      return getReader(filename, clazz);
+    }
+  }
+
+  private static class ReaderProviderByURL implements ReaderProvider {
+    private final URL url;
+
+    ReaderProviderByURL(URL url) {
+      this.url = url;
+    }
+
+    @Override
+    public Reader provide() throws IOException {
+      return new InputStreamReader(url.openStream());
+    }
+  }
+
   /**
    * Print read tokens before loading the model.
    * This will effectively parse the input two times, thus slowing the operation.
@@ -71,8 +100,23 @@ public class ParserUtils {
    */
   public static Root load(String fileName, Class<?> clazz) throws IOException, Parser.Exception {
     logger.info("Loading model DSL file '{}'", fileName);
+    return load(new ReaderProviderByName(fileName, clazz));
+  }
 
-    Reader reader = getReader(fileName, clazz);
+  /**
+   * Loads a model from a file at the given URL.
+   * @param url a URL pointing to a file
+   * @return the parsed model
+   * @throws IOException if the file could not be found, or opened
+   * @throws Parser.Exception if the file contains a malformed model
+   */
+  public static Root load(URL url) throws IOException, Parser.Exception {
+    logger.info("Loading model DSL from '{}'", url);
+    return load(new ReaderProviderByURL(url));
+  }
+
+  private static Root load(ReaderProvider readerProvider) throws IOException, Parser.Exception {
+    Reader reader = readerProvider.provide();
     if (verboseLoading) {
       EraserScanner scanner = new EraserScanner(reader);
       try {
@@ -87,7 +131,7 @@ public class ParserUtils {
         try {
           reader.reset();
         } catch (IOException resetEx) {
-          reader = getReader(fileName, clazz);
+          reader = readerProvider.provide();
         }
       }
     }
@@ -120,7 +164,12 @@ public class ParserUtils {
     }
   }
 
-  public static void createUnknownGroup(Root model, Collection<Item> danglingItems) {
+  /**
+   * Creates the well-known group called "Unknown" and adds all dangling items to it.
+   * @param model         The model to operate on
+   * @param danglingItems A list of items to add to the new group
+   */
+  public static void createUnknownGroup(OpenHAB2Model model, Collection<Item> danglingItems) {
     Group unknownGroup = new Group();
     unknownGroup.setID("Unknown");
     model.addGroup(unknownGroup);
@@ -128,55 +177,19 @@ public class ParserUtils {
     logger.info("Created new {}", unknownGroup.prettyPrint().trim());
   }
 
-  public static void createMqttTopic(Item item, String topicName, Root root) {
-    String[] parts = topicName.split("/");
-    String firstPart = parts[0];
-    MqttTopic firstTopic = null;
-    for (MqttTopic topic : root.getMqttRoot().getTopicList()) {
-      if (topic.getPart().equals(firstPart)) {
-        firstTopic = topic;
-        break;
-      }
-    }
-    if (firstTopic == null) {
-      // no matching topic found for first part. create one.
-      firstTopic = createTopic(firstPart, root);
-    }
-//    MqttTopic firstTopic = firstPartTopicMap.computeIfAbsent(firstPart, part -> createTopic(part, root));
-    MqttTopic lastTopic = processRemainingTopicParts(firstTopic, parts, 1);
-    item.setTopic(lastTopic);
-  }
-
-  private static MqttTopic processRemainingTopicParts(MqttTopic topic, String[] parts, int index) {
-    if (index >= parts.length) {
-      return topic;
-    }
-    for (MqttTopic subTopic : topic.getSubTopicList()) {
-      if (subTopic.getPart().equals(parts[index])) {
-        // matching part found
-        return processRemainingTopicParts(subTopic, parts, index + 1);
-      }
-    }
-    // no matching part was found. create remaining topics.
-    for (int currentIndex = index; currentIndex < parts.length; currentIndex++) {
-      MqttTopic newTopic = createSubTopic(parts[currentIndex]);
-      topic.addSubTopic(newTopic);
-      topic = newTopic;
-    }
-    return topic;
-  }
-
-  private static MqttTopic createSubTopic(String part) {
-    return createTopic(part, null);
-  }
-
-  private static MqttTopic createTopic(String part, Root root) {
-    MqttTopic result = new MqttTopic();
-    result.setPart(part);
-    if (root != null) {
+  /**
+   * Creates or resolves a topic for the given topic suffix and assigns it to the given item.
+   * @param item        The item to which the topic will be assigned
+   * @param topicSuffix The topic suffix to resolve or create
+   * @param root        The model to operate on
+   */
+  public static void createMqttTopic(Item item, String topicSuffix, Root root) {
+    item.setTopic(root.getMqttRoot().resolveTopicSuffix(topicSuffix).orElseGet(() -> {
+      MqttTopic result = new MqttTopic();
+      result.setTopicString(topicSuffix);
       root.getMqttRoot().addTopic(result);
-    }
-    return result;
+      return result;
+    }));
   }
 
 }
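ParserUtils now funnels both entry points through a private ReaderProvider, so the verbose-loading path can re-open its input after a failed reset regardless of whether the model came from a file name or a URL. A hedged usage sketch (the file name is hypothetical):

```java
import de.tudresden.inf.st.eraser.jastadd.model.Root;
import de.tudresden.inf.st.eraser.util.ParserUtils;

public class LoadDemo {
  public static void main(String[] args) throws Exception {
    // Both overloads end up in the same private load(ReaderProvider).
    Root fromName = ParserUtils.load("models/example.eraser", LoadDemo.class);
    Root fromUrl  = ParserUtils.load(LoadDemo.class.getResource("/models/example.eraser"));
    System.out.println("loaded: " + (fromName != null) + ", " + (fromUrl != null));
  }
}
```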
diff --git a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/TestUtils.java b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/TestUtils.java
index f31ffd14dc1b8a017396a1efc5e15b76d65110bd..5f07ec7c576829975db82121e42e142497b49586 100644
--- a/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/TestUtils.java
+++ b/eraser-base/src/main/java/de/tudresden/inf/st/eraser/util/TestUtils.java
@@ -10,9 +10,9 @@ import de.tudresden.inf.st.eraser.jastadd.model.*;
 public class TestUtils {
 
   public static class ModelAndItem {
-    public Root model;
+    public OpenHAB2Model model;
     public NumberItem item;
-    static ModelAndItem of(Root model, NumberItem item) {
+    static ModelAndItem of(OpenHAB2Model model, NumberItem item) {
       ModelAndItem result = new ModelAndItem();
       result.model = model;
       result.item = item;
@@ -25,21 +25,21 @@ public class TestUtils {
   }
 
   public static ModelAndItem createModelAndItem(double initialValue, boolean useUpdatingItem) {
-    Root model = Root.createEmptyRoot();
+    Root root = Root.createEmptyRoot();
     Group g = new Group();
-    model.addGroup(g);
+    root.getOpenHAB2Model().addGroup(g);
     g.setID("group1");
 
     NumberItem item = addItemTo(g, initialValue, useUpdatingItem);
 
-    return ModelAndItem.of(model, item);
+    return ModelAndItem.of(root.getOpenHAB2Model(), item);
   }
 
-  public static NumberItem addItemTo(Root model, double initialValue) {
+  public static NumberItem addItemTo(OpenHAB2Model model, double initialValue) {
     return addItemTo(model, initialValue, false);
   }
 
-  public static NumberItem addItemTo(Root model, double initialValue, boolean useUpdatingItem) {
+  public static NumberItem addItemTo(OpenHAB2Model model, double initialValue, boolean useUpdatingItem) {
     return addItemTo(getDefaultGroup(model), initialValue, useUpdatingItem);
   }
 
@@ -56,7 +56,7 @@ public class TestUtils {
     return item;
   }
 
-  public static Group getDefaultGroup(Root model) {
+  public static Group getDefaultGroup(OpenHAB2Model model) {
     // use first found group
     return model.getGroup(0);
   }
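TestUtils now hands out the OpenHAB2Model instead of the Root, so tests that need the MQTT, Influx, or machine-learning parts go through getRoot() first, as the updated tests below do. A short, illustrative fragment:

```java
TestUtils.ModelAndItem mai = TestUtils.createModelAndItem(0);
OpenHAB2Model model = mai.model;     // items and groups live here
Root root = model.getRoot();         // MQTT, Influx and ML roots hang off the Root
root.getMachineLearningRoot();
TestUtils.addItemTo(model, 42.0);    // adds another NumberItem to the default group
```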
diff --git a/eraser-base/src/main/resources/log4j2.xml b/eraser-base/src/main/resources/log4j2.xml
index 89799a2f09ba34d288e610d960b3ed6348213105..18175a02521156259c8789745fb849fa893302e9 100644
--- a/eraser-base/src/main/resources/log4j2.xml
+++ b/eraser-base/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ControllingItemTest.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ControllingItemTest.java
index 75773e80bdd0ed7ad5ca6a9d13accb28c0f05a54..edc98e72719ef63ffa63f0cb19aac9b7106ab879 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ControllingItemTest.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ControllingItemTest.java
@@ -6,6 +6,8 @@ import de.tudresden.inf.st.eraser.jastadd.model.*;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.function.Consumer;
+
 /**
  * Testing the simple rule engine.
  *
@@ -56,26 +58,22 @@ public class ControllingItemTest {
   @Test
   public void testManyToOneColor() {
     ModelAndItem mai = TestUtils.createModelAndItem(0);
-    Root model = mai.model;
+    OpenHAB2Model model = mai.model;
     NumberItem numberItem = mai.item;
 
     Group g = TestUtils.getDefaultGroup(model);
 
-    StringItem stringItem = new StringItem();
-    stringItem.setState("0");
-    initAndAddItem(g, stringItem);
+    StringItem stringItem = initAndAddItem(g, new StringItem(),
+        item -> item.setState("0"));
 
-    SwitchItem booleanItem = new SwitchItem();
-    booleanItem.setState(false);
-    initAndAddItem(g, booleanItem);
+    SwitchItem booleanItem = initAndAddItem(g, new SwitchItem(),
+        item -> item.setState(false));
 
-    ColorItem colorItem = new ColorItem();
-    colorItem.setState(TupleHSB.of(0, 0, 0));
-    initAndAddItem(g, colorItem);
+    ColorItem colorItem = initAndAddItem(g, new ColorItem(),
+        item -> item.setState(TupleHSB.of(0, 0, 0)));
 
-    ColorItem target = new ColorItem();
-    target.setState(TupleHSB.of(0, 0, 0));
-    initAndAddItem(g, target);
+    ColorItem target = initAndAddItem(g, new ColorItem(),
+        item -> item.setState(TupleHSB.of(0, 0, 0)));
 
     target.addControlledBy(numberItem);
     target.addControlledBy(stringItem);
@@ -119,9 +117,11 @@ public class ControllingItemTest {
     Assert.assertEquals("Item was not controlled correctly", TupleHSB.of(33, 33, 44), target.getState());
   }
 
-  private void initAndAddItem(Group group, Item item) {
-    item.enableSendState();
+  private <T extends Item> T initAndAddItem(Group group, T item, Consumer<T> setState) {
     item.setID("item" + group.getNumItem());
     group.addItem(item);
+    setState.accept(item);
+    item.enableSendState();
+    return item;
   }
 }
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/DecisionTreeTest.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/DecisionTreeTest.java
index 89fbacb6e128f72aea11eccb52155ca422906ac0..fa93e0691d782030d2e45c385f9327a57c9e1ae4 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/DecisionTreeTest.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/DecisionTreeTest.java
@@ -17,7 +17,7 @@ public class DecisionTreeTest {
     TestUtils.ModelAndItem mai = TestUtils.createModelAndItem(4);
 
     DecisionTreeRoot dtroot = new DecisionTreeRoot();
-    mai.model.getMachineLearningRoot().setActivityRecognition(dtroot);
+    mai.model.getRoot().getMachineLearningRoot().setActivityRecognition(dtroot);
     DecisionTreeLeaf isLessThanFour = newLeaf("less than four");
     DecisionTreeLeaf isFourOrGreater = newLeaf("four or greater");
     ItemStateNumberCheck check = new ItemStateNumberCheck(ComparatorType.LessThan, 4f);
@@ -44,7 +44,7 @@ public class DecisionTreeTest {
     TestUtils.ModelAndItem mai = TestUtils.createModelAndItem(20);
 
     DecisionTreeRoot dtroot = new DecisionTreeRoot();
-    mai.model.getMachineLearningRoot().setActivityRecognition(dtroot);
+    mai.model.getRoot().getMachineLearningRoot().setActivityRecognition(dtroot);
 
     DecisionTreeLeaf isLessThan25 = newLeaf("less than 25");
     DecisionTreeLeaf is25OrGreater = newLeaf("25 or greater");
@@ -130,7 +130,7 @@ public class DecisionTreeTest {
     TestUtils.ModelAndItem mai = TestUtils.createModelAndItem(0);
 
     DecisionTreeRoot dtroot = new DecisionTreeRoot();
-    mai.model.getMachineLearningRoot().setActivityRecognition(dtroot);
+    mai.model.getRoot().getMachineLearningRoot().setActivityRecognition(dtroot);
     DecisionTreeLeaf left = newLeaf("left");
     DecisionTreeLeaf right = newLeaf("right");
     ItemStateNumberCheck check = new ItemStateNumberCheck(comparatorType, expected);
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/InfluxTest.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/InfluxTest.java
index 5fd3a3ac2320207da3ebf2bc7ff91c1470efeec8..a7d76202ffc30da22280f9a5ec95c643cb4aee42 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/InfluxTest.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/InfluxTest.java
@@ -1,11 +1,12 @@
 package de.tudresden.inf.st.eraser;
 
+import de.tudresden.inf.st.eraser.jastadd.model.*;
 import de.tudresden.inf.st.eraser.util.TestUtils;
 import de.tudresden.inf.st.eraser.util.TestUtils.ModelAndItem;
-import de.tudresden.inf.st.eraser.jastadd.model.*;
 import org.junit.*;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
+import org.testcontainers.containers.InfluxDBContainer;
 
 import java.time.Instant;
 import java.util.ArrayList;
@@ -29,6 +30,12 @@ public class InfluxTest {
 
   private ModelAndItem mai;
 
+  @ClassRule
+  public static InfluxDBContainer influxDbContainer = new InfluxDBContainer()
+      .withDatabase(InfluxRoot.createDefault().getDbName())
+      .withUsername(InfluxRoot.createDefault().getUser())
+      .withPassword(InfluxRoot.createDefault().getPassword());
+
   @Test
   public void oneItem() {
     NumberItem item = mai.item;
@@ -95,8 +102,8 @@ public class InfluxTest {
 
   @Test
   public void justAdapter() {
-    InfluxAdapter influxAdapter = mai.model.getInfluxRoot().influxAdapter();
-    Assume.assumeTrue("Adapter not connected", influxAdapter.isConnected());
+    InfluxAdapter influxAdapter = getInfluxRoot().influxAdapter();
+    Assert.assertTrue("Adapter not connected", influxAdapter.isConnected());
     influxAdapter.deleteDatabase();
 
     // write one point
@@ -151,12 +158,16 @@ public class InfluxTest {
       influxRoot.setDbName(InfluxTest.class.getSimpleName());
       influxRoot.setHostByName("vm2098.zih.tu-dresden.de");
     }
-    mai.model.setInfluxRoot(influxRoot);
+    mai.model.getRoot().setInfluxRoot(influxRoot);
     Assume.assumeTrue(influxRoot.influxAdapter().isConnected());
     influxRoot.influxAdapter().deleteDatabase();
     return mai;
   }
 
+  private InfluxRoot getInfluxRoot() {
+    return mai.model.getRoot().getInfluxRoot();
+  }
+
   private List<DoubleStatePoint> query(ItemWithDoubleState... allItems) {
     if (useStub) {
       return points;
@@ -172,13 +183,13 @@ public class InfluxTest {
   @Before
   public void setNewModel() {
     mai = createModel();
-    mai.model.getInfluxRoot().influxAdapter().disableAsyncQuery();
+    getInfluxRoot().influxAdapter().disableAsyncQuery();
   }
 
   @After
   public void closeInfluxAdapter() throws Exception {
     if (mai != null && mai.model != null) {
-      mai.model.getInfluxRoot().influxAdapter().close();
+      getInfluxRoot().influxAdapter().close();
     }
   }
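The InfluxDB test now relies on a Testcontainers @ClassRule: the container is started once before the first test of the class and torn down afterwards, with credentials taken from InfluxRoot.createDefault(). A hedged sketch of the same lifecycle with placeholder credentials, to show where the mapped port becomes available:

```java
@ClassRule
public static InfluxDBContainer influxDbContainer = new InfluxDBContainer()
    .withDatabase("eraser")      // placeholder values; the real test derives them from InfluxRoot.createDefault()
    .withUsername("user")
    .withPassword("password");

@Test
public void containerIsReachable() {
  // host and port are only known once the rule has started the container
  String host = influxDbContainer.getContainerIpAddress();
  int port = influxDbContainer.getFirstMappedPort();
  Assert.assertTrue(port > 0);
}
```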
 
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ItemTests.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ItemTests.java
index 0b02f08b9d6c12e2ac51935d2c1da8bb7660c5e6..ffda213bea9a359097db8b962f03a48e52502c57 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ItemTests.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ItemTests.java
@@ -1,9 +1,12 @@
 package de.tudresden.inf.st.eraser;
 
 import de.tudresden.inf.st.eraser.jastadd.model.*;
+import de.tudresden.inf.st.eraser.util.TestUtils;
+import de.tudresden.inf.st.eraser.util.TestUtils.ModelAndItem;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.time.Instant;
 import java.util.Date;
 
 /**
@@ -15,7 +18,7 @@ public class ItemTests {
 
   @Test
   public void testItemWithBooleanStateEquals() {
-    ItemWithBooleanState sut = new SwitchItem();
+    ItemWithBooleanState sut = createItem(SwitchItem::new);
 
     sut.setState(true);
     Assert.assertTrue("State 'true' should match 'true'", sut.stateEquals(true));
@@ -28,7 +31,7 @@ public class ItemTests {
 
   @Test
   public void testItemWithStringStateEquals() {
-    ItemWithStringState sut = new ImageItem();
+    ItemWithStringState sut = createItem(ImageItem::new);
 
     sut.setState("correct");
     Assert.assertTrue("State 'correct' should match 'correct'",
@@ -39,7 +42,7 @@ public class ItemTests {
 
   @Test
   public void testItemWithDoubleStateEquals() {
-    ItemWithDoubleState sut = new NumberItem();
+    ItemWithDoubleState sut = createItem(NumberItem::new);
 
     sut.setState(3.0);
     Assert.assertTrue("State '3.0' should match '3.0'", sut.stateEquals(3.0));
@@ -52,7 +55,7 @@ public class ItemTests {
 
   @Test
   public void testItemWithTupleHSBStateEquals() {
-    ColorItem sut = new ColorItem();
+    ColorItem sut = createItem(ColorItem::new);
 
     sut.setState(TupleHSB.of(1, 2, 3));
     Assert.assertTrue("State 'TupleHSB(1,2,3)' should match 'TupleHSB(1,2,3)'",
@@ -71,73 +74,27 @@ public class ItemTests {
 
   @Test
   public void testItemWithDateStateEquals() {
-    DateTimeItem sut = new DateTimeItem();
+    DateTimeItem sut = createItem(DateTimeItem::new);
 
-    sut.setState(new Date(1543415826));
+    sut.setState(Instant.ofEpochMilli(1543415826));
     Assert.assertTrue("State 'Date(1543415826)' should match 'Date(1543415826)'",
-        sut.stateEquals(new Date(1543415826)));
+        sut.stateEquals(Instant.ofEpochMilli(1543415826)));
     Assert.assertFalse("State 'Date(1543415826)' should not match 'Date(4)'",
-        sut.stateEquals(new Date(4)));
+        sut.stateEquals(Instant.ofEpochMilli(4)));
   }
 
-  @Test
-  public void testItemWithBooleanCopyEquals() {
-    ItemWithBooleanState sut = new SwitchItem();
-
-    sut.setState(true);
-    Object copiedState = sut.copyState();
-    Assert.assertTrue("State 'true' should match copy", sut.stateEquals(copiedState));
-
-    sut.setState(false);
-    Assert.assertFalse("State 'false' should not match copy", sut.stateEquals(copiedState));
-  }
-
-  @Test
-  public void testItemWithStringCopyEquals() {
-    ItemWithStringState sut = new ImageItem();
-
-    sut.setState("correct");
-    Object copiedState = sut.copyState();
-    Assert.assertTrue("State 'correct' should match copy", sut.stateEquals(copiedState));
-
-    sut.setState("something else");
-    Assert.assertFalse("State 'something else' should not match copy", sut.stateEquals(copiedState));
+  @FunctionalInterface
+  private interface CreateItem<T extends Item> {
+    T create();
   }
 
-  @Test
-  public void testItemWithLongCopyEquals() {
-    ItemWithDoubleState sut = new NumberItem();
-
-    sut.setState(3.0);
-    Object copiedState = sut.copyState();
-    Assert.assertTrue("State '3.0' should match copy", sut.stateEquals(copiedState));
-
-    sut.setState(4.0);
-    Assert.assertFalse("State '4.0' should not match copy", sut.stateEquals(copiedState));
-  }
-
-  @Test
-  public void testItemWithTupleHSBCopyEquals() {
-    ColorItem sut = new ColorItem();
-
-    sut.setState(TupleHSB.of(1, 2, 3));
-    Object copiedState = sut.copyState();
-    Assert.assertTrue("State 'TupleHSB(1,2,3)' should match copy", sut.stateEquals(copiedState));
-
-    sut.setState(TupleHSB.of(5,5,5));
-    Assert.assertFalse("State 'TupleHSB(5,5,5)' should not match copy", sut.stateEquals(copiedState));
-  }
-
-  @Test
-  public void testItemWithDateCopyEquals() {
-    DateTimeItem sut = new DateTimeItem();
-
-    sut.setState(new Date(1543415826));
-    Object copiedState = sut.copyState();
-    Assert.assertTrue("State 'Date(1543415826' should match copy", sut.stateEquals(copiedState));
-
-    sut.setState(new Date(4));
-    Assert.assertFalse("State 'Date(4)' should not match copy", sut.stateEquals(copiedState));
+  private <T extends Item> T createItem(CreateItem<T> creator) {
+    // create a root with default group and one unused item
+    ModelAndItem mai = TestUtils.createModelAndItem(0);
+    // create wanted item, add it to default group, and return it
+    T result = creator.create();
+    TestUtils.getDefaultGroup(mai.model).addItem(result);
+    return result;
   }
 
 }
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/MqttTests.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/MqttTests.java
index a141d720ff794a7833010fe8c673c80d8f151f49..1c30aa8700ed3e2de0cda9fcb67086e83029db45 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/MqttTests.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/MqttTests.java
@@ -3,11 +3,16 @@ package de.tudresden.inf.st.eraser;
 import de.tudresden.inf.st.eraser.jastadd.model.*;
 import de.tudresden.inf.st.eraser.util.MqttReceiver;
 import de.tudresden.inf.st.eraser.util.TestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Assume;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.wait.strategy.Wait;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -35,25 +40,29 @@ public class MqttTests {
   private static final double thirdState = 3.0;
 
   private List<String> messages = new ArrayList<>();
+  private static final Logger logger = LogManager.getLogger(MqttTests.class);
 
+  @ClassRule
+  public static GenericContainer mqttBroker = new GenericContainer<>("eclipse-mosquitto:1.5")
+      .withExposedPorts(1883);
 
   @Test
   public void resolve1() {
-    RootItemAndTwoTopics rootAB = createAB();
-    MqttRoot sut = rootAB.model.getMqttRoot();
+    ModelItemAndTwoTopics modelAB = createAB();
+    MqttRoot sut = modelAB.model.getRoot().getMqttRoot();
 
     // incoming mqtt topic might be "inc/a/" or "inc/a/b"
 
     Assert.assertTrue(sut.resolveTopic("inc/a").isPresent());
-    Assert.assertEquals("Could not resolve a.", rootAB.firstTopic, sut.resolveTopic("inc/a").get());
+    Assert.assertEquals("Could not resolve a.", modelAB.firstTopic, sut.resolveTopic("inc/a").get());
     Assert.assertTrue(sut.resolveTopic("inc/a/b").isPresent());
-    Assert.assertEquals("Could not resolve a/b.", rootAB.secondTopic, sut.resolveTopic("inc/a/b").get());
+    Assert.assertEquals("Could not resolve a/b.", modelAB.secondTopic, sut.resolveTopic("inc/a/b").get());
   }
 
   @Test
   public void brokerConnected() throws Exception {
-    RootItemAndTwoTopics rootAB = createAB();
-    MqttRoot sut = rootAB.model.getMqttRoot();
+    ModelItemAndTwoTopics modelAB = createAB();
+    MqttRoot sut = modelAB.model.getRoot().getMqttRoot();
 //    MqttRoot mqttRoot = new MqttRoot();
 //    mqttRoot.setHostByName("localhost");
 //    MQTTSender sender = new MQTTSenderImpl().setHost(mqttRoot.getHost());
@@ -67,11 +76,11 @@ public class MqttTests {
   public void itemUpdateSend1() throws Exception {
     String expectedTopic = outgoingPrefix + "/" + firstPart + "/" + secondPart;
 
-    RootItemAndTwoTopics rootAB = createAB();
-    assumeSenderConnected(rootAB.model);
+    ModelItemAndTwoTopics modelAB = createAB();
+    assertSenderConnected(modelAB);
 
-    NumberItem sut = rootAB.item;
-    sut.setTopic(rootAB.secondTopic);
+    NumberItem sut = modelAB.item;
+    sut.setTopic(modelAB.secondTopic);
 
     createMqttReceiver(expectedTopic);
 
@@ -88,29 +97,20 @@ public class MqttTests {
     Assert.assertEquals(Double.toString(secondState), messages.get(1));
   }
 
-  private void assumeSenderConnected(Root model) {
-    Assume.assumeTrue("Broker is not connected", model.getMqttRoot().getMqttSender().isConnected());
-  }
-
   @Test
   public void itemUpdateSend2() throws Exception {
     String expectedTopic1 = outgoingPrefix + "/" + firstPart + "/" + secondPart;
     String expectedTopic2 = outgoingPrefix + "/" + alternativeFirstPart + "/" + secondPart;
 
-    RootItemAndTwoTopics rootAB = createAB();
-    assumeSenderConnected(rootAB.model);
+    ModelItemAndTwoTopics modelAB = createAB();
+    assertSenderConnected(modelAB);
 
-    NumberItem item1 = rootAB.item;
-    item1.setTopic(rootAB.secondTopic);
+    NumberItem item1 = modelAB.item;
+    item1.setTopic(modelAB.secondTopic);
 
-    MqttTopic alternative = new MqttTopic();
-    alternative.setPart(alternativeFirstPart);
-    MqttTopic alternativeB = new MqttTopic();
-    alternativeB.setPart(secondPart);
-    alternative.addSubTopic(alternativeB);
-    rootAB.model.getMqttRoot().addTopic(alternative);
+    MqttTopic alternativeB = createAndAddMqttTopic(modelAB.model.getRoot().getMqttRoot(), alternativeFirstPart + "/" + secondPart);
 
-    NumberItem item2 = TestUtils.addItemTo(rootAB.model, 0);
+    NumberItem item2 = TestUtils.addItemTo(modelAB.model, 0);
     item2.setTopic(alternativeB);
 
     createMqttReceiver(expectedTopic1, expectedTopic2);
@@ -136,6 +136,19 @@ public class MqttTests {
     Assert.assertThat(messages, hasItem(Double.toString(thirdState)));
   }
 
+  private void assertSenderConnected(ModelItemAndTwoTopics modelAB) {
+    MqttRoot mqttRoot = modelAB.model.getRoot().getMqttRoot();
+    mqttBroker.waitingFor(Wait.forHealthcheck());
+    if (!mqttRoot.getMqttSender().isConnected()) {
+      try {
+        Thread.sleep(1000);
+      } catch (InterruptedException e) {
+        logger.catching(e);
+      }
+    }
+    Assert.assertTrue("Broker is not connected", mqttRoot.getMqttSender().isConnected());
+  }
+
   private void createMqttReceiver(String... expectedTopics) throws IOException {
     if (useStub) {
       // do not need receiver, as messages are directly written by MqttSenderStub and callback
@@ -143,7 +156,8 @@ public class MqttTests {
     }
     MqttReceiver receiver = new MqttReceiver();
     List<String> expectedTopicList = Arrays.asList(expectedTopics);
-    receiver.setHost("localhost");
+//    receiver.setHost("localhost", 1883);
+    receiver.setHost(mqttBroker.getContainerIpAddress(), mqttBroker.getFirstMappedPort());
     receiver.setTopicsForSubscription(expectedTopics);
     receiver.setOnMessage((topic, message) -> {
       Assert.assertThat(expectedTopicList, hasItem(topic));
@@ -153,9 +167,9 @@ public class MqttTests {
     receiver.waitUntilReady(2, TimeUnit.SECONDS);
   }
 
-  private RootItemAndTwoTopics createAB() {
+  private ModelItemAndTwoTopics createAB() {
     TestUtils.ModelAndItem mai = TestUtils.createModelAndItem(0);
-    Root model = mai.model;
+    OpenHAB2Model model = mai.model;
     MqttRoot mqttRoot = new MqttRoot();
     mqttRoot.setIncomingPrefix("inc");
     mqttRoot.setOutgoingPrefix(outgoingPrefix);
@@ -164,26 +178,23 @@ public class MqttTests {
       // now a SenderStub is being used
       ((MQTTSenderStub) mqttRoot.getMqttSender()).setCallback(((topic, message, qos) -> messages.add(message)));
     } else {
-      mqttRoot.setHostByName("localhost");
+//      mqttRoot.setHostByName("localhost");
+      mqttRoot.setHost(ExternalHost.of(mqttBroker.getContainerIpAddress(), mqttBroker.getFirstMappedPort()));
     }
-    MqttTopic a = new MqttTopic();
-    a.setPart(firstPart);
-    MqttTopic ab = new MqttTopic();
-    ab.setPart(secondPart);
-    a.addSubTopic(ab);
-    mqttRoot.addTopic(a);
+    MqttTopic a = createAndAddMqttTopic(mqttRoot, firstPart);
+    MqttTopic ab = createAndAddMqttTopic(mqttRoot, firstPart + "/" + secondPart);
     mqttRoot.ensureCorrectPrefixes();
-    model.setMqttRoot(mqttRoot);
-    return RootItemAndTwoTopics.of(model, mai.item, a, ab);
+    model.getRoot().setMqttRoot(mqttRoot);
+    return ModelItemAndTwoTopics.of(model, mai.item, a, ab);
   }
 
-  static class RootItemAndTwoTopics {
-    Root model;
+  static class ModelItemAndTwoTopics {
+    OpenHAB2Model model;
     NumberItem item;
     MqttTopic firstTopic;
     MqttTopic secondTopic;
-    static RootItemAndTwoTopics of(Root model, NumberItem item, MqttTopic firstTopic, MqttTopic secondTopic) {
-      RootItemAndTwoTopics result = new RootItemAndTwoTopics();
+    static ModelItemAndTwoTopics of(OpenHAB2Model model, NumberItem item, MqttTopic firstTopic, MqttTopic secondTopic) {
+      ModelItemAndTwoTopics result = new ModelItemAndTwoTopics();
       result.model = model;
       result.item = item;
       result.firstTopic = firstTopic;
@@ -192,6 +203,13 @@ public class MqttTests {
     }
   }
 
+  private MqttTopic createAndAddMqttTopic(MqttRoot mqttRoot, String suffix) {
+    MqttTopic result = new MqttTopic();
+    result.setTopicString(suffix);
+    mqttRoot.addTopic(result);
+    return result;
+  }
+
   private <T> void assertTimeoutEquals(long seconds, T expected, Supplier<T> actualProvider) throws InterruptedException {
     if (expected == actualProvider.get()) {
       // already matched right now. return immediately.
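assertSenderConnected above falls back to a single fixed Thread.sleep(1000) before asserting the connection. Not part of this change set, but a possible alternative is a small polling wait with a deadline; MQTTSender is the sender interface already referenced in the test:

```java
// Possible alternative to the fixed sleep: poll until connected or the deadline passes.
private static boolean waitForConnection(MQTTSender sender, long timeoutMillis) throws InterruptedException {
  long deadline = System.currentTimeMillis() + timeoutMillis;
  while (!sender.isConnected() && System.currentTimeMillis() < deadline) {
    Thread.sleep(50);
  }
  return sender.isConnected();
}
```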
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/NeuralNetworkTest.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/NeuralNetworkTest.java
index 5d658500c5abb55e18b0ba6c5e5895e4341cdb01..0eb593fd0fc6c3cb3401f315afe33d550e4b3658 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/NeuralNetworkTest.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/NeuralNetworkTest.java
@@ -24,7 +24,7 @@ public class NeuralNetworkTest {
      */
 
     NeuralNetworkRoot neuralNetworkRoot = NeuralNetworkRoot.createEmpty();
-    mai.model.getMachineLearningRoot().setPreferenceLearning(neuralNetworkRoot);
+    mai.model.getRoot().getMachineLearningRoot().setPreferenceLearning(neuralNetworkRoot);
     InputNeuron inputNeuron = new InputNeuron();
     inputNeuron.setItem(mai.item);
     HiddenNeuron hiddenNeuron = new HiddenNeuron();
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/OpenHabImporterTest.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/OpenHabImporterTest.java
index 37766dac143bd79c0a94e83873fd29393078b564..8c109638fea74d074cfc72168303d68a5d2e69a7 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/OpenHabImporterTest.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/OpenHabImporterTest.java
@@ -1,5 +1,6 @@
 package de.tudresden.inf.st.eraser;
 
+import de.tudresden.inf.st.eraser.jastadd.model.OpenHAB2Model;
 import de.tudresden.inf.st.eraser.jastadd.model.Root;
 import de.tudresden.inf.st.eraser.jastadd_test.core.*;
 import de.tudresden.inf.st.eraser.openhab2.OpenHab2Importer;
@@ -70,7 +71,7 @@ public class OpenHabImporterTest {
 
       // call the modified importer, with the static host name, and an arbitrary chosen port
       // port will not be used during the test
-      Root model = importer.importFrom(HOST, 80);
+      OpenHAB2Model model = importer.importFrom(HOST, 80);
 
       if (model == null) {
         if (expected == Result.PARSE_FAILED) return;
@@ -88,7 +89,7 @@ public class OpenHabImporterTest {
         }
       }
 
-      printAndCompare(config, model);
+      printAndCompare(config, model.getRoot());
     }
   }
 
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ParserTests.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ParserTests.java
deleted file mode 100644
index 02a6d30a19592b87a683cd9f0e1e63ae5bf6e3f5..0000000000000000000000000000000000000000
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/ParserTests.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package de.tudresden.inf.st.eraser;
-
-import de.tudresden.inf.st.eraser.util.TestUtils;
-import de.tudresden.inf.st.eraser.util.TestUtils.ModelAndItem;
-import de.tudresden.inf.st.eraser.jastadd.model.MqttRoot;
-import de.tudresden.inf.st.eraser.jastadd.model.NumberItem;
-import de.tudresden.inf.st.eraser.jastadd.model.Root;
-import de.tudresden.inf.st.eraser.util.ParserUtils;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * Testing helper methods using in parsing.
- *
- * @author rschoene - Initial contribution
- */
-public class ParserTests {
-
-  @Test
-  public void testCreateMqttTopicSimple() {
-    ModelAndItem mai = TestUtils.createModelAndItem(1);
-
-    Root model = mai.model;
-    model.setMqttRoot(new MqttRoot());
-    NumberItem item = mai.item;
-
-    Assert.assertNull(item.getTopic());
-
-    String[] parts = { "one", "two", "three" };
-    String topicName = String.join("/", parts);
-
-    ParserUtils.createMqttTopic(item, topicName, model);
-
-    Assert.assertNotNull(item.getTopic());
-
-    MqttRoot mqttRoot = model.getMqttRoot();
-    Assert.assertEquals("There must be only one topic", 1, mqttRoot.getNumTopic());
-    Assert.assertEquals("First part is wrong",
-        parts[0], mqttRoot.getTopic(0).getPart());
-    Assert.assertEquals("First topic has wrong number of sub-topics",
-        1, mqttRoot.getTopic(0).getNumSubTopic());
-    Assert.assertEquals("Second part is wrong",
-        parts[1], mqttRoot.getTopic(0).getSubTopic(0).getPart());
-    Assert.assertEquals("Second topic has wrong number of sub-topics",
-        1, mqttRoot.getTopic(0).getSubTopic(0).getNumSubTopic());
-    Assert.assertEquals("Third part is wrong",
-        parts[2], mqttRoot.getTopic(0).getSubTopic(0).getSubTopic(0).getPart());
-    Assert.assertEquals("Third part is wrong object",
-        item.getTopic(), mqttRoot.getTopic(0).getSubTopic(0).getSubTopic(0));
-
-    Assert.assertEquals("Name does not match", topicName, item.getTopic().allParts());
-  }
-
-
-  @Test
-  public void testCreateMqttTopicTwoInterleavedTopics() {
-    ModelAndItem mai = TestUtils.createModelAndItem(1);
-
-    Root model = mai.model;
-    model.setMqttRoot(new MqttRoot());
-    NumberItem item1 = mai.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 3);
-
-    Assert.assertNull(item1.getTopic());
-    Assert.assertNull(item2.getTopic());
-
-    String[] parts = { "one", "two", "three" };
-    String otherPart2 = "222";
-    String topicName1 = String.join("/", parts);
-    String topicName2 = String.join("/", parts[0], otherPart2, parts[2]);
-
-    ParserUtils.createMqttTopic(item1, topicName1, model);
-    ParserUtils.createMqttTopic(item2, topicName2, model);
-
-    Assert.assertNotNull(item1.getTopic());
-    Assert.assertNotNull(item2.getTopic());
-
-    MqttRoot mqttRoot = model.getMqttRoot();
-    Assert.assertEquals("There must be only one topic", 1, mqttRoot.getNumTopic());
-    Assert.assertEquals("First part is wrong",
-        parts[0], mqttRoot.getTopic(0).getPart());
-    Assert.assertEquals("First topic has wrong number of sub-topics",
-        2, mqttRoot.getTopic(0).getNumSubTopic());
-    Assert.assertEquals("Second part for first item is wrong",
-        parts[1], mqttRoot.getTopic(0).getSubTopic(0).getPart());
-    Assert.assertEquals("Second part for first item is wrong",
-        otherPart2, mqttRoot.getTopic(0).getSubTopic(1).getPart());
-    Assert.assertEquals("Second topic for first item has wrong number of sub-topics",
-        1, mqttRoot.getTopic(0).getSubTopic(0).getNumSubTopic());
-    Assert.assertEquals("Third part for first item is wrong",
-        parts[2], mqttRoot.getTopic(0).getSubTopic(0).getSubTopic(0).getPart());
-    Assert.assertEquals("Second topic for second item has wrong number of sub-topics",
-        1, mqttRoot.getTopic(0).getSubTopic(1).getNumSubTopic());
-    Assert.assertEquals("Third part for second item is wrong",
-        parts[2], mqttRoot.getTopic(0).getSubTopic(1).getSubTopic(0).getPart());
-    Assert.assertEquals("Third part for first item is wrong object",
-        item1.getTopic(), mqttRoot.getTopic(0).getSubTopic(0).getSubTopic(0));
-    Assert.assertEquals("Third part for second item is wrong object",
-        item2.getTopic(), mqttRoot.getTopic(0).getSubTopic(1).getSubTopic(0));
-
-    Assert.assertEquals("Name for first item does not match", topicName1, item1.getTopic().allParts());
-    Assert.assertEquals("Name for second item does not match", topicName2, item2.getTopic().allParts());
-  }
-
-}
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/RulesTest.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/RulesTest.java
index 1ec9aaef9f40bedd5744c344c55ac7b6338f3da1..2c318278f6b108c4a1c36fae567fe48d832a0734 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/RulesTest.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/RulesTest.java
@@ -50,13 +50,13 @@ public class RulesTest {
   @Test
   public void testUnconditionalLambdaAction() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
 
     Rule rule = new Rule();
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Counter not initialized correctly"), 0, counter.get(item));
@@ -74,11 +74,11 @@ public class RulesTest {
   @Test
   public void testIdempotentActivation() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(4);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
 
     Rule rule = new Rule();
-    model.addRule(rule);
+    root.addRule(rule);
 
     rule.activateFor(item);
     rule.activateFor(item);
@@ -94,18 +94,18 @@ public class RulesTest {
   @Test
   public void testTwoRulesForOneItem() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(4);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
 
     Counters counter1 = new Counters();
     Rule ruleA = new Rule();
     ruleA.addAction(new LambdaAction(counter1));
-    model.addRule(ruleA);
+    root.addRule(ruleA);
 
     Rule ruleB = new Rule();
     Counters counter2 = new Counters();
     ruleB.addAction(new LambdaAction(counter2));
-    model.addRule(ruleB);
+    root.addRule(ruleB);
 
     ruleA.activateFor(item);
     ruleB.activateFor(item);
@@ -129,14 +129,14 @@ public class RulesTest {
   @Test
   public void testOneRuleForTwoItems() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(4);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item1 = modelAndItem.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 4, useUpdatingItem);
+    NumberItem item2 = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
 
     Rule rule = new Rule();
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
 
     rule.activateFor(item1);
     rule.activateFor(item2);
@@ -188,13 +188,13 @@ public class RulesTest {
   @Test
   public void testRemoveActivation() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(4);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
 
     Rule rule = new Rule();
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
 
     rule.activateFor(item);
 
@@ -212,7 +212,7 @@ public class RulesTest {
   @Test
   public void testNumberConditions() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(2);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
 
     Rule rule = new Rule();
@@ -222,7 +222,7 @@ public class RulesTest {
     rule.addCondition(new ItemStateCheckCondition(check2));
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Counter not initialized correctly"), 0, counter.get(item));
@@ -246,7 +246,7 @@ public class RulesTest {
   @Test
   public void testTwoActions() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(2);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
 
     Rule rule = new Rule();
@@ -254,7 +254,7 @@ public class RulesTest {
     rule.addAction(new LambdaAction(counter1));
     Counters counter2 = new Counters();
     rule.addAction(new LambdaAction(counter2));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("First counter not initialized correctly"), 0, counter1.get(item));
@@ -272,9 +272,9 @@ public class RulesTest {
   @Test
   public void testChainedRules() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(2);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 4, useUpdatingItem);
+    NumberItem item2 = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
 
     Rule ruleA = new Rule();
     Counters counter1 = new Counters();
@@ -286,8 +286,8 @@ public class RulesTest {
 
     ruleA.addAction(new TriggerRuleAction(ruleB));
 
-    model.addRule(ruleA);
-    model.addRule(ruleB);
+    root.addRule(ruleA);
+    root.addRule(ruleB);
     ruleA.activateFor(item);
     ruleB.activateFor(item2);
 
@@ -315,15 +315,15 @@ public class RulesTest {
   @Test
   public void testSetStateFromConstantStringAction() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 4, useUpdatingItem);
+    NumberItem item2 = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
 
     Rule rule = new Rule();
     rule.addAction(new SetStateFromConstantStringAction(item2, "5"));
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Affected item not initialized correctly"),
@@ -347,15 +347,15 @@ public class RulesTest {
   public void testSetStateFromLambdaAction() {
     ValuedStateProvider provider = new ValuedStateProvider();
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(0);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 3, useUpdatingItem);
+    NumberItem item2 = TestUtils.addItemTo(root.getOpenHAB2Model(), 3, useUpdatingItem);
 
     Rule rule = new Rule();
     rule.addAction(new SetStateFromLambdaAction(item2, provider));
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Affected item not initialized correctly"),
@@ -389,15 +389,15 @@ public class RulesTest {
   @Test
   public void testSetStateFromTriggeringItemAction() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    StringItem item2 = addStringItem(model, "0");
+    StringItem item2 = addStringItem(root.getOpenHAB2Model(), "0");
 
     Rule rule = new Rule();
     Counters counter = new Counters();
     rule.addAction(new SetStateFromTriggeringItemAction(item2));
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Counter not initialized correctly"), 0, counter.get(item));
@@ -419,10 +419,10 @@ public class RulesTest {
   @Test
   public void testSetStateFromItemsAction() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 4, useUpdatingItem);
-    StringItem affectedItem = addStringItem(model, "1");
+    NumberItem item2 = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
+    StringItem affectedItem = addStringItem(root.getOpenHAB2Model(), "1");
 
     Rule rule = new Rule();
     SetStateFromItemsAction action = new SetStateFromItemsAction(items ->
@@ -435,7 +435,7 @@ public class RulesTest {
     rule.addAction(action);
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Counter not initialized correctly"), 0, counter.get(item));
@@ -467,7 +467,7 @@ public class RulesTest {
         "12", affectedItem.getState());
 
     // add new item to sum
-    NumberItem item3 = TestUtils.addItemTo(model, -4, useUpdatingItem);
+    NumberItem item3 = TestUtils.addItemTo(root.getOpenHAB2Model(), -4, useUpdatingItem);
     action.addSourceItem(item3);
 
     // still 7 + 5 = 12, as rule should not trigger
@@ -486,15 +486,15 @@ public class RulesTest {
   @Test
   public void testAddDoubleToStateAction() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    NumberItem affectedItem = TestUtils.addItemTo(model, 4, useUpdatingItem);
+    NumberItem affectedItem = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
 
     Rule rule = new Rule();
     rule.addAction(new AddDoubleToStateAction(affectedItem, 2));
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Counter not initialized correctly"), 0, counter.get(item));
@@ -523,15 +523,15 @@ public class RulesTest {
   @Test
   public void testMultiplyDoubleToStateAction() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item = modelAndItem.item;
-    NumberItem affectedItem = TestUtils.addItemTo(model, 4, useUpdatingItem);
+    NumberItem affectedItem = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
 
     Rule rule = new Rule();
     rule.addAction(new MultiplyDoubleToStateAction(affectedItem, 2));
     Counters counter = new Counters();
     rule.addAction(new LambdaAction(counter));
-    model.addRule(rule);
+    root.addRule(rule);
     rule.activateFor(item);
 
     Assert.assertEquals(m("Counter not initialized correctly"), 0, counter.get(item));
@@ -560,10 +560,10 @@ public class RulesTest {
   @Test
   public void testChainAddMultiplyActions() {
     TestUtils.ModelAndItem modelAndItem = createModelAndItem(3);
-    Root model = modelAndItem.model;
+    Root root = modelAndItem.model.getRoot();
     NumberItem item1 = modelAndItem.item;
-    NumberItem item2 = TestUtils.addItemTo(model, 4, useUpdatingItem);
-    NumberItem affectedItem = TestUtils.addItemTo(model, 5, useUpdatingItem);
+    NumberItem item2 = TestUtils.addItemTo(root.getOpenHAB2Model(), 4, useUpdatingItem);
+    NumberItem affectedItem = TestUtils.addItemTo(root.getOpenHAB2Model(), 5, useUpdatingItem);
 
     Rule ruleA = new Rule();
     ruleA.addAction(new AddDoubleToStateAction(affectedItem, 2));
@@ -577,8 +577,8 @@ public class RulesTest {
 
     ruleA.addAction(new TriggerRuleAction(ruleB));
 
-    model.addRule(ruleA);
-    model.addRule(ruleB);
+    root.addRule(ruleA);
+    root.addRule(ruleB);
     ruleA.activateFor(item1);
     ruleB.activateFor(item2);
 
@@ -653,7 +653,7 @@ public class RulesTest {
     return message + " (Using " + name + ")";
   }
 
-  private StringItem addStringItem(Root model, String initialValue) {
+  private StringItem addStringItem(OpenHAB2Model model, String initialValue) {
     StringItem item = new StringItem();
     Group group = TestUtils.getDefaultGroup(model);
     item.setID("item" + group.getNumItem());
diff --git a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/jastadd_test/core/TestRunner.java b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/jastadd_test/core/TestRunner.java
index 984fb21d95d17d02b3ef7c07d064c410e300afb2..6978df5cafeee586a7025fc1855d88b1b240a49b 100644
--- a/eraser-base/src/test/java/de/tudresden/inf/st/eraser/jastadd_test/core/TestRunner.java
+++ b/eraser-base/src/test/java/de/tudresden/inf/st/eraser/jastadd_test/core/TestRunner.java
@@ -30,6 +30,7 @@
 package de.tudresden.inf.st.eraser.jastadd_test.core;
 
 import beaver.Parser;
+import de.tudresden.inf.st.eraser.jastadd.model.OpenHAB2Model;
 import de.tudresden.inf.st.eraser.jastadd.model.Root;
 import de.tudresden.inf.st.eraser.util.ParserUtils;
 
@@ -67,9 +68,9 @@ public class TestRunner {
     Result expected = config.expected;
 
     // Parse input model
-    Root model;
+    Root root;
     try {
-      model = parseModel(config);
+      root = parseModel(config);
     } catch (Parser.Exception e) {
       if (expected == Result.PARSE_FAILED) return;
       // otherwise rethrow error
@@ -80,7 +81,7 @@ public class TestRunner {
       fail("Parsing the model should have failed, but was successful!");
     }
 
-    if (model == null) {
+    if (root == null) {
       fail("Parsing the model should have passed, but model was null!");
     }
 
@@ -91,15 +92,15 @@ public class TestRunner {
       return;
     }
 
-    printAndCompare(config, model);
+    printAndCompare(config, root);
   }
 
-  protected static void printAndCompare(TestConfiguration config, Root model) {
+  protected static void printAndCompare(TestConfiguration config, Root root) {
     Result expected = config.expected;
     // Print model.
     String output;
     try {
-      output = printModel(model, config);
+      output = printModel(root, config);
     } catch (Exception e) {
       if (expected == Result.PRINT_FAILED) return;
         // otherwise rethrow error
@@ -149,8 +150,8 @@ public class TestRunner {
     return new File(testDir, "jastadd.err.expected");
   }
 
-  private static String printModel(Root model, TestConfiguration config) {
-    return model.prettyPrint();
+  private static String printModel(Root root, TestConfiguration config) {
+    return root.prettyPrint();
   }
 
   /**
diff --git a/eraser-base/src/test/resources/log4j2-test.xml b/eraser-base/src/test/resources/log4j2-test.xml
index 5c534092d64e9c1834c2ba20208c057e2b56be16..b20ed4ecc82af01ce5278a8d9f0c1aa77a32f00b 100644
--- a/eraser-base/src/test/resources/log4j2-test.xml
+++ b/eraser-base/src/test/resources/log4j2-test.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser-test.log"
+                    filePattern="logs/eraser-test-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
@@ -21,5 +21,14 @@
         <Logger name="de.tudresden.inf.st.eraser.openhab2.mqtt" level="DEBUG" additivity="false">
             <Appender-ref ref="Console"/>
         </Logger>
+        <!-- Reduce noise from Testcontainers -->
+        <Logger name="org.testcontainers" level="INFO" additivity="false">
+            <Appender-ref ref="Console"/>
+            <Appender-ref ref="RollingFile"/>
+        </Logger>
+        <Logger name="com.github.dockerjava.core" level="INFO" additivity="false">
+            <Appender-ref ref="Console"/>
+            <Appender-ref ref="RollingFile"/>
+        </Logger>
     </Loggers>
 </Configuration>
diff --git a/eraser-base/src/test/resources/openhabtest/oh1/output.eraser b/eraser-base/src/test/resources/openhabtest/oh1/output.eraser
index c422840036be85ad9c3744a6fefc39ded481ee97..1844189cea3d085b031650470be0916f4943b21f 100644
--- a/eraser-base/src/test/resources/openhabtest/oh1/output.eraser
+++ b/eraser-base/src/test/resources/openhabtest/oh1/output.eraser
@@ -18,5 +18,3 @@ Color Item: id="wohnzimmer_item" label="Wohnzimmer" state="0,0,0" category="Ligh
 Color Item: id="iris1_item" label="Iris 1" state="226,100,98" category="Lighting" ;
 Group: id="all_dimmable_lamps" label="All dimmable lamps" items=["Go1_item"] aggregation="AVG" ;
 Group: id="Unknown" items=["Rule_Switch", "Color_Manual_Slider", "watch_acceleration_x", "watch_acceleration_y", "watch_acceleration_z", "watch_rotation_x", "watch_rotation_y", "watch_rotation_z", "phone_rotation_x", "phone_rotation_y", "phone_rotation_z", "samsung_brightness", "skywriter_flick_item", "polar_brightness", "moto_360_brightness", "wohnzimmer_item", "iris1_item"] ;
-Mqtt: host="localhost" ;
-Influx: host="localhost" ;
diff --git a/eraser-base/src/test/resources/openhabtest/oh2/links.json b/eraser-base/src/test/resources/openhabtest/oh2/links.json
index 79e79442806b87513d8581eb877453873069ddbe..f881db063fd53906ab14e3fda79dbf4a5fac0d4b 100644
--- a/eraser-base/src/test/resources/openhabtest/oh2/links.json
+++ b/eraser-base/src/test/resources/openhabtest/oh2/links.json
@@ -142,7 +142,7 @@
   {
     "channelUID": "openlicht:polar-m600:342dfc32:rotation-y",
     "configuration": {},
-    "itemName": "watch_acceleration_y"
+    "itemName": "watch_rotation_y"
   },
   {
     "channelUID": "openlicht:polar-m600:342dfc32:rotation-z",
diff --git a/eraser-base/src/test/resources/openhabtest/oh2/output.eraser b/eraser-base/src/test/resources/openhabtest/oh2/output.eraser
index c5e139935809383792596f6183ffe31514c8dbd0..e6354c12fa1e0a71edbe6eb77aafd9420c51069b 100644
--- a/eraser-base/src/test/resources/openhabtest/oh2/output.eraser
+++ b/eraser-base/src/test/resources/openhabtest/oh2/output.eraser
@@ -77,7 +77,7 @@ Channel: id="openlicht:polar-m600:342dfc32:activity" type="openlicht:activity-ty
 Channel: id="openlicht:polar-m600:342dfc32:brightness" type="openlicht:brightness-type" links=["polar_brightness"] ;
 Channel: id="openlicht:polar-m600:342dfc32:heart-rate" type="openlicht:heart-rate-type" ;
 Channel: id="openlicht:polar-m600:342dfc32:rotation-x" type="openlicht:rotation-type" links=["watch_rotation_x"] ;
-Channel: id="openlicht:polar-m600:342dfc32:rotation-y" type="openlicht:rotation-type" links=["watch_acceleration_y"] ;
+Channel: id="openlicht:polar-m600:342dfc32:rotation-y" type="openlicht:rotation-type" links=["watch_rotation_y"] ;
 Channel: id="openlicht:polar-m600:342dfc32:rotation-z" type="openlicht:rotation-type" links=["watch_rotation_z"] ;
 Channel: id="openlicht:polar-m600:342dfc32:steps" type="openlicht:steps-type" ;
 Channel: id="openlicht:samsung-s6:2ca84896:brightness" type="openlicht:brightness-type" links=["samsung_brightness"] ;
@@ -85,5 +85,3 @@ Channel: id="openlicht:samsung-s6:2ca84896:rotation-x" type="openlicht:rotation-
 Channel: id="openlicht:samsung-s6:2ca84896:rotation-y" type="openlicht:rotation-type" links=["phone_rotation_y"] ;
 Channel: id="openlicht:samsung-s6:2ca84896:rotation-z" type="openlicht:rotation-type" links=["phone_rotation_z"] ;
 Channel: id="openlicht:skywriter-hat:e937d4f3:flick" type="openlicht:flick-type" links=["skywriter_flick_item"] ;
-Mqtt: host="localhost" ;
-Influx: host="localhost" ;
diff --git a/eraser-base/src/test/resources/tests/hostNonDefaultPort/Test.properties b/eraser-base/src/test/resources/tests/hostNonDefaultPort/Test.properties
new file mode 100644
index 0000000000000000000000000000000000000000..616a3deddd4decffd5d0e6e3db7bb8ce3873562a
--- /dev/null
+++ b/eraser-base/src/test/resources/tests/hostNonDefaultPort/Test.properties
@@ -0,0 +1 @@
+result=OUTPUT_PASS
diff --git a/eraser-base/src/test/resources/tests/hostNonDefaultPort/description b/eraser-base/src/test/resources/tests/hostNonDefaultPort/description
new file mode 100644
index 0000000000000000000000000000000000000000..bdaa31556a7a02ab764f16f5a5157676ce5126bf
--- /dev/null
+++ b/eraser-base/src/test/resources/tests/hostNonDefaultPort/description
@@ -0,0 +1 @@
+Test whether non-default ports for MQTT and Influx are working.
diff --git a/eraser-base/src/test/resources/tests/hostNonDefaultPort/input.eraser b/eraser-base/src/test/resources/tests/hostNonDefaultPort/input.eraser
new file mode 100644
index 0000000000000000000000000000000000000000..427f6137aa8058df0e264bb6ca8f200eb71dbd7d
--- /dev/null
+++ b/eraser-base/src/test/resources/tests/hostNonDefaultPort/input.eraser
@@ -0,0 +1,2 @@
+Influx: host="www.example.com:1234";
+Mqtt: host="localhost:9876";
diff --git a/eraser-base/src/test/resources/tests/hostNonDefaultPort/output.eraser b/eraser-base/src/test/resources/tests/hostNonDefaultPort/output.eraser
new file mode 100644
index 0000000000000000000000000000000000000000..73d8147fea0252ec1c95364ad6ad5449a829b1e6
--- /dev/null
+++ b/eraser-base/src/test/resources/tests/hostNonDefaultPort/output.eraser
@@ -0,0 +1,2 @@
+Mqtt: host="localhost:9876" ;
+Influx: host="www.example.com:1234" ;
diff --git a/eraser-base/src/test/resources/tests/ppc3/output.eraser b/eraser-base/src/test/resources/tests/ppc3/output.eraser
index e4b9707a84cc17f4901da3a68abf73c27eac58fc..b9a6d2f2a4301a35562946cc67ceca725d35e9e9 100644
--- a/eraser-base/src/test/resources/tests/ppc3/output.eraser
+++ b/eraser-base/src/test/resources/tests/ppc3/output.eraser
@@ -2,7 +2,7 @@ Color Item: id="color1" label="a Color Item" state="1,2,3" topic="item/hsb/color
 Contact Item: id="contact1" label="a Contact Item" state="true" topic="item/bool/contact1/state" ;
 Image Item: id="image1" label="an Image Item" state="def" topic="item/str/image1/state" ;
 Location Item: id="location1" label="a Location Item" state="ghi" topic="item/str/location1/state" ;
-DateTime Item: id="datetime1" label="a DateTime Item" state="1970-01-18T20:43:35.826" topic="item/date/datetime1/state" ;
+DateTime Item: id="datetime1" label="a DateTime Item" state="1970-01-18T20:43:35.826Z" topic="item/date/datetime1/state" ;
 Item: id="default1" label="a Default Item" state="pqr" topic="item/str/default1/state" ;
 Dimmer Item: id="dimmer1" label="a Dimmer Item" state="123.0" topic="item/double/dimmer1/state" ;
 Player Item: id="player1" label="a Player Item" state="jkl" topic="item/str/player1/state" ;
diff --git a/eraser-base/src/test/resources/tests/ppc4/input.eraser b/eraser-base/src/test/resources/tests/ppc4/input.eraser
index f363a8c8ebff9a763dbb34fbca6010bc43588faf..f5c7afff39c0a2d617f293076a86a794fa24263f 100644
--- a/eraser-base/src/test/resources/tests/ppc4/input.eraser
+++ b/eraser-base/src/test/resources/tests/ppc4/input.eraser
@@ -15,6 +15,7 @@ Location Item : id="location1" label="a Location Item" state="ghi" topic="item/s
 Number Item : id="number1" label="a Number Item" state="456" topic="item/double/number1/state" controls=["string1"];
 Player Item : id="player1" label="a Player Item" state="jkl" topic="item/str/player1/state";
 RollerShutter Item : id="rollerShutter1" label="a RollerShutter Item" state="false" topic="item/str/rs1/state";
+Activity Item: id="activity";
 String Item : id="string1" label="a String Item" state="mno" topic="item/str/string1/state";
 Switch Item : id="switch1" label="a Switch Item" state="true" topic="item/bool/switch1/state" controls=["rollerShutter1"];
 Item : id="default1" label="a Default Item" state="pqr" topic="item/str/default1/state";
diff --git a/eraser-base/src/test/resources/tests/ppc4/output.eraser b/eraser-base/src/test/resources/tests/ppc4/output.eraser
index 1231dc15e00cb468e6bb01da988501d1b0f26720..1b881dd0d5d11159a298a1c41b1d04d530353bd8 100644
--- a/eraser-base/src/test/resources/tests/ppc4/output.eraser
+++ b/eraser-base/src/test/resources/tests/ppc4/output.eraser
@@ -2,18 +2,19 @@ Color Item: id="color1" label="a Color Item" state="1,2,3" topic="item/hsb/color
 Contact Item: id="contact1" label="a Contact Item" state="true" topic="item/bool/contact1/state" ;
 Image Item: id="image1" label="an Image Item" state="def" topic="item/str/image1/state" ;
 Location Item: id="location1" label="a Location Item" state="ghi" topic="item/str/location1/state" ;
-DateTime Item: id="datetime1" label="a DateTime Item" state="1970-01-18T20:43:35.826" topic="item/date/datetime1/state" ;
+DateTime Item: id="datetime1" label="a DateTime Item" state="1970-01-18T20:43:35.826Z" topic="item/date/datetime1/state" ;
 Item: id="default1" label="a Default Item" state="pqr" topic="item/str/default1/state" ;
 Dimmer Item: id="dimmer1" label="a Dimmer Item" state="123.0" topic="item/double/dimmer1/state" controls=["color1", "datetime1"] ;
 Player Item: id="player1" label="a Player Item" state="jkl" topic="item/str/player1/state" ;
 Number Item: id="number1" label="a Number Item" state="456.0" topic="item/double/number1/state" controls=["string1"] ;
 RollerShutter Item: id="rollerShutter1" label="a RollerShutter Item" state="false" topic="item/str/rs1/state" ;
+Activity Item: id="activity" ;
 String Item: id="string1" label="a String Item" state="mno" topic="item/str/string1/state" ;
 Switch Item: id="switch1" label="a Switch Item" state="true" topic="item/bool/switch1/state" controls=["rollerShutter1"] ;
 Group: id="my-first-group" items=["color1", "contact1", "image1", "location1"] aggregation="AND" ("ON", "OFF") ;
 Group: id="my-second-group" items=["datetime1", "default1"] ;
 Group: id="my-third-group" items=["dimmer1", "player1"] ;
 Group: id="my-empty-group" ;
-Group: id="Unknown" items=["number1", "rollerShutter1", "string1", "switch1"] ;
+Group: id="Unknown" items=["number1", "rollerShutter1", "activity", "string1", "switch1"] ;
 Mqtt: incoming="ppc3/" outgoing="oh2/in/" host="localhost" ;
 Influx: host="localhost" ;
diff --git a/eraser-base/src/test/resources/tests/ppc5/description b/eraser-base/src/test/resources/tests/ppc5/description
index 8db9b9bceee597ada76ef353e2e683003efa6e8e..ff98c287292d9b2a62174084a242cf6e4d25c012 100644
--- a/eraser-base/src/test/resources/tests/ppc5/description
+++ b/eraser-base/src/test/resources/tests/ppc5/description
@@ -1 +1,2 @@
 Complete feature-test, i.e., minimal and maximal many members set for each parseable non-terminal.
+Also test whether multi-line statements work.
diff --git a/eraser-base/src/test/resources/tests/ppc5/input.eraser b/eraser-base/src/test/resources/tests/ppc5/input.eraser
index 6cf47838d171c8c07e3919b42c05bc9ec8e80af6..21cc14bc796965aae71d2bca119f60b0cdf640c5 100644
--- a/eraser-base/src/test/resources/tests/ppc5/input.eraser
+++ b/eraser-base/src/test/resources/tests/ppc5/input.eraser
@@ -10,11 +10,15 @@ Group: id="min-group" ;
 Group: items=["min-item", "max-item"] id="max-group" groups=["min-group"] aggregation="AND" ("one", "two") ;
 
 Number Item: id="min-item" ; // state will be set to default value
-Switch Item: topic="items/max" controls=["min-item"] id="max-item" state="true" label="Item with all members set" category="not used" metaData={"one":"true", "zero":"false"} ;
+Switch Item: topic="items/max"
+ controls=["min-item"] id="max-item" state="true" label="Item with all members set" category="not used"
+ metaData={"one":"true", "zero":"false"} ;
 
 // Parameters will get sorted alphabetically in output
 Parameter: id="min-parameter" ;
-Parameter: label="Max Parameter" required id="max-parameter" type="Decimal" description="Parameter with all members set" default="should be a number" ;
+Parameter
+:
+label="Max Parameter" required id="max-parameter" type="Decimal" description="Parameter with all members set" default="should be a number" ;
 
 ThingType: id="min-thing-type" ;
 ThingType: id="max-thing-type" label="Max Thing Type" description="Thing type with all members set" parameters=["min-parameter", "max-parameter"] channelTypes=["min-channel-type", "max-channel-type-1", "max-channel-type-2"] ;
@@ -22,5 +26,10 @@ ThingType: id="max-thing-type" label="Max Thing Type" description="Thing type wi
 Thing: type="min-thing-type" id="min-thing" ; // "type" must be set
 Thing: id="max-thing" label="Max Thing" type="max-thing-type" channels=["min-channel", "max-channel"] ;
 
-Influx: ; // minimal Influx, most probably will fail?
-Mqtt: ; // minimal MQTT, most probably will fail?
+Influx: ; // minimal Influx, most probably will fail in an actual system
+Mqtt: ; // minimal MQTT, most probably will fail in an actual system
+
+ML: activities = {
+  0 : "an interesting activity",
+  3: "rather boring activity"
+} ;
diff --git a/eraser-base/src/test/resources/tests/ppc5/output.eraser b/eraser-base/src/test/resources/tests/ppc5/output.eraser
index 1df79c3d8afdc7181f6334d546052122c949fe49..22f0869e0f35ece07c2aaf9b6dedbf6f11c4ae3b 100644
--- a/eraser-base/src/test/resources/tests/ppc5/output.eraser
+++ b/eraser-base/src/test/resources/tests/ppc5/output.eraser
@@ -13,5 +13,4 @@ Parameter: id="max-parameter" label="Max Parameter" description="Parameter with
 Parameter: id="min-parameter" ;
 Channel: id="max-channel" type="max-channel-type-1" links=["min-item", "max-item"] ;
 Channel: id="min-channel" type="min-channel-type" ;
-Mqtt: ;
-Influx: ;
+ML: activities={0:"an interesting activity", 3:"rather boring activity"} ;
diff --git a/eraser.rest/build.gradle b/eraser.rest/build.gradle
index 0f7c52ef57d6d2c0100b9bb34e0473c5f4a05e6b..27a715637cd813a03182c27d332abedab4a02af0 100644
--- a/eraser.rest/build.gradle
+++ b/eraser.rest/build.gradle
@@ -23,15 +23,10 @@ sourceCompatibility = 1.8
 
 dependencies {
     compile project(':eraser-base')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
     compile 'org.springframework.boot:spring-boot-starter-web'
-    compile 'io.springfox:springfox-swagger2:2.9.2'
-    compile 'io.springfox:springfox-swagger-ui:2.9.2'
+    compile group: 'io.springfox', name: 'springfox-swagger2', version: '2.9.2'
+    compile group: 'io.springfox', name: 'springfox-swagger-ui', version: '2.9.2'
     testCompile 'org.springframework.boot:spring-boot-starter-test'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 sourceSets {
diff --git a/eraser.rest/src/main/resources/log4j2.xml b/eraser.rest/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/eraser.rest/src/main/resources/log4j2.xml
+++ b/eraser.rest/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/eraser.spark/build.gradle b/eraser.spark/build.gradle
index c5bdd901ca07fd6e2483c795e947a771488f11e2..fc14267292c11e5920e2bc3b7609a45a98d6ec6c 100644
--- a/eraser.spark/build.gradle
+++ b/eraser.spark/build.gradle
@@ -1,7 +1,7 @@
 plugins {
     id 'java'
     id 'application'
-    id 'io.franzbecker.gradle-lombok' version '1.14'
+    id 'io.franzbecker.gradle-lombok' version '3.0.0'
 }
 
 repositories {
@@ -12,13 +12,8 @@ sourceCompatibility = 1.8
 
 dependencies {
     compile project(':eraser-base')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    compile 'org.apache.logging.log4j:log4j-slf4j-impl:2.11.1'
-    compile 'com.sparkjava:spark-core:2.8.0'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
+    compile group: 'org.apache.logging.log4j', name: 'log4j-slf4j-impl', version: '2.11.2'
+    compile group: 'com.sparkjava', name: 'spark-core', version: '2.9.0'
 }
 
 run {
diff --git a/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/Application.java b/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/Application.java
index 2602ca703f7cab2ba7fcf8ffc03158d6b65f591f..ae15f566876bb233706d34a9b563fa85af195244 100644
--- a/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/Application.java
+++ b/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/Application.java
@@ -9,7 +9,6 @@ import spark.Request;
 import spark.Response;
 import spark.Spark;
 
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -26,12 +25,12 @@ import java.util.stream.Collectors;
 public class Application {
 
   private static final Logger logger = LogManager.getLogger(Application.class);
-  private final Root model;
+  private final Root root;
   private final Lock lock;
   private final Condition quitCondition;
 
-  private Application(Root model, Lock lock, Condition quitCondition) {
-    this.model = model;
+  private Application(Root root, Lock lock, Condition quitCondition) {
+    this.root = root;
     this.lock = lock;
     this.quitCondition = quitCondition;
   }
@@ -43,19 +42,19 @@ public class Application {
 
     Spark.path("/activity", () -> {
       Spark.get("",
-          (request, response) -> wrapActivityList(model.getMachineLearningRoot().getActivityList()),
+          (request, response) -> wrapActivityList(root.getMachineLearningRoot().getActivityList()),
           mapper::writeValueAsString);
       Spark.get("/current",
-          (request, response) -> JavaUtils.ifPresentOrElseReturn(model.currentActivity(),
+          (request, response) -> JavaUtils.ifPresentOrElseReturn(root.currentActivity(),
               this::wrapActivity,
               () -> makeError(response, 204, "No activity recognized.")),
           mapper::writeValueAsString);
       Spark.put("/current", (request, response) -> {
         logger.info("request body: '{}', params: '{}', length={}", request.body(), request.params(), request.contentLength());
-        if (!model.getMachineLearningRoot().hasActivityRecognition()) {
+        if (!root.getMachineLearningRoot().hasActivityRecognition()) {
           return makeError(response, 404, "No activity recognition model found");
         }
-        MachineLearningModel activityRecognition = model.getMachineLearningRoot().getActivityRecognition();
+        MachineLearningModel activityRecognition = root.getMachineLearningRoot().getActivityRecognition();
         if (activityRecognition.canSetActivity()) {
           activityRecognition.setActivity(Integer.valueOf(request.body()));
           return "OK";
@@ -65,7 +64,7 @@ public class Application {
       });
       Spark.get("/:identifier",
           (request, response) ->
-              JavaUtils.ifPresentOrElseReturn(model.resolveActivity(paramAsInt(request, "identifier")),
+              JavaUtils.ifPresentOrElseReturn(root.resolveActivity(paramAsInt(request, "identifier")),
                   this::wrapActivity,
                   () -> makeError(response, 404, "No activity for identifier " + request.params("identifier"))),
           mapper::writeValueAsString);
@@ -73,11 +72,11 @@ public class Application {
 
     Spark.path("/events", () -> {
       Spark.get("",
-          (request, response) -> wrapChangeEventList(model.getMachineLearningRoot().getChangeEventList()),
+          (request, response) -> wrapChangeEventList(root.getMachineLearningRoot().getChangeEventList()),
           mapper::writeValueAsString);
       Spark.get("/:identifier",
           (request, response) ->
-              JavaUtils.ifPresentOrElseReturn(model.resolveChangeEvent(paramAsInt(request, "identifier")),
+              JavaUtils.ifPresentOrElseReturn(root.resolveChangeEvent(paramAsInt(request, "identifier")),
                   this::wrapChangeEvent,
                   () -> makeError(response, 404, "No event for identifier " + request.params("identifier"))),
           mapper::writeValueAsString);
@@ -86,10 +85,10 @@ public class Application {
     Spark.path("/model", () -> {
       Spark.get("/full", (request, response) -> {
         response.type("text/plain");
-        return model.prettyPrint();
+        return root.prettyPrint();
       });
       Spark.get("/items",
-          (request, response) -> wrapItemList(model.items()),
+          (request, response) -> wrapItemList(root.getOpenHAB2Model().items()),
           mapper::writeValueAsString);
       Spark.put("/items/:identifier/state", (request, response) -> {
         logger.info("request body: '{}', params: '{}', length={}", request.body(), request.params(), request.contentLength());
@@ -124,13 +123,13 @@ public class Application {
   }
 
   private Object safeItemRoute(Request request, Response response, Function<Item, String> action) {
-    return JavaUtils.ifPresentOrElseReturn(model.resolveItem(request.params("identifier")), action,
+    return JavaUtils.ifPresentOrElseReturn(root.getOpenHAB2Model().resolveItem(request.params("identifier")), action,
         () -> makeError(response, 404, "Item '" + request.body() + "' not found"));
   }
 
   private String makeHistory(Item item, Response response) {
     response.type("text/plain");
-    InfluxAdapter influxAdapter = model.getInfluxRoot().influxAdapter();
+    InfluxAdapter influxAdapter = root.getInfluxRoot().influxAdapter();
     influxAdapter.disableAsyncQuery();
     List<? extends AbstractItemPoint> list;
     if (item.asColorItem() != null) {
@@ -181,7 +180,9 @@ public class Application {
   private SimpleItem wrapItem(Item item) {
     return SimpleItem.of(item.getID(),
         item.getLabel(),
-        item.getTopic() != null ? item.getTopic().allParts() : null,
+        item.getTopic() != null ? item.getTopic().getTopicString() : null,
+        item.isFrozen(),
+        item.isSendState(),
         item.getControllingList().stream().map(Item::getID).collect(Collectors.toList()),
         wrapMetaData(item.getMetaDataList()));
   }
diff --git a/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/DummyDataCreator.java b/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/DummyDataCreator.java
index 902f15bbd31fd7480bd413a239526b9576f68015..e548a6bc95012ba72675131395b9956f4a84466f 100644
--- a/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/DummyDataCreator.java
+++ b/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/DummyDataCreator.java
@@ -10,21 +10,21 @@ import de.tudresden.inf.st.eraser.util.JavaUtils;
  */
 class DummyDataCreator {
 
-  private final Root model;
+  private final Root root;
 
-  DummyDataCreator(Root model) {
-    this.model = model;
+  DummyDataCreator(Root root) {
+    this.root = root;
   }
 
   private void addDummyActivitiesToModel() {
-    MachineLearningRoot mlRoot = model.getMachineLearningRoot();
+    MachineLearningRoot mlRoot = root.getMachineLearningRoot();
     mlRoot.addActivity(new Activity(1, "Sitting in armchair"));
     mlRoot.addActivity(new Activity(2, "Going to sleep"));
     mlRoot.addActivity(new Activity(3, "Entering house"));
   }
 
   private void addDummyChangeEventsToModel() {
-    MachineLearningRoot mlRoot = model.getMachineLearningRoot();
+    MachineLearningRoot mlRoot = root.getMachineLearningRoot();
     Item iris1 = getOrCreateColorItem("iris1", "Hue Iris 1");
     Item go1 = getOrCreateColorItem("go1", "Hue Go 1");
     Item go2 = getOrCreateColorItem("go2", "Hue Go 2");
@@ -40,7 +40,7 @@ class DummyDataCreator {
   }
 
   private Item getOrCreateColorItem(String name, String label) {
-    return model.resolveItem(name).orElseGet(() -> {
+    return root.getOpenHAB2Model().resolveItem(name).orElseGet(() -> {
       ColorItem result = new ColorItem();
       result.setID(name);
       result.setLabel(label);
@@ -53,7 +53,7 @@ class DummyDataCreator {
 
   private RecognitionEvent newRecognitionEvent(int identifier, long timestamp, int activityIdentifier, ChangedItem... changedItems) {
     RecognitionEvent result = new RecognitionEvent();
-    JavaUtils.ifPresentOrElse(model.resolveActivity(activityIdentifier), result::setActivity, () -> { throw new RuntimeException("No activity found for identifier " + activityIdentifier); });
+    JavaUtils.ifPresentOrElse(root.resolveActivity(activityIdentifier), result::setActivity, () -> { throw new RuntimeException("No activity found for identifier " + activityIdentifier); });
     initChangeEvent(result, identifier, timestamp, changedItems);
     return result;
   }
diff --git a/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/SimpleItem.java b/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/SimpleItem.java
index 86007bae0cb6ea0366f48a7282341f86c75d8510..2eac624367215c547ef4cfe0058fdcce337062fa 100644
--- a/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/SimpleItem.java
+++ b/eraser.spark/src/main/java/de/tudresden/inf/st/eraser/spark/SimpleItem.java
@@ -15,6 +15,8 @@ public class SimpleItem {
   public final String ID;
   public final String label;
   public final String topic;
+  public final boolean frozen;
+  public final boolean sendState;
   public final List<String> controlling;
   public final Map<String, String> metaData;
 }
diff --git a/eraser.spark/src/main/resources/log4j2.xml b/eraser.spark/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/eraser.spark/src/main/resources/log4j2.xml
+++ b/eraser.spark/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/eraser.starter/build.gradle b/eraser.starter/build.gradle
index c0430b43501673b3ba4ab1b5ffce20d67789f99e..c6d86dcf9977de6e043e469e17726aa87224fb75 100644
--- a/eraser.starter/build.gradle
+++ b/eraser.starter/build.gradle
@@ -1,12 +1,16 @@
+plugins {
+    id 'java'
+    id 'application'
+    id 'distribution'
+    id 'io.github.http-builder-ng.http-plugin' version '0.1.1'
+}
+
 repositories {
     mavenCentral()
 }
 
 sourceCompatibility = 1.8
 
-apply plugin: 'java'
-apply plugin: 'application'
-
 dependencies {
     compile project(':eraser-base')
     compile project(':eraser.spark')
@@ -15,14 +19,9 @@ dependencies {
     compile project(':feedbackloop.plan')
     compile project(':feedbackloop.execute')
     compile project(':feedbackloop.learner')
-    compile 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.9.8'
-    compile 'com.fasterxml.jackson.core:jackson-databind:2.9.8'
+    compile group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.9.8'
     compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    compile 'net.sourceforge.argparse4j:argparse4j:0.8.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
+    compile group: 'net.sourceforge.argparse4j', name: 'argparse4j', version: '0.8.1'
 }
 
 run {
@@ -30,6 +29,20 @@ run {
     standardInput = System.in
 }
 
+import io.github.httpbuilderng.http.HttpTask
+
+task shutdown (type: HttpTask) {
+    group = 'application'
+    description = 'Shuts down a running eraser application'
+
+    config {
+        request.uri = 'http://localhost:4567'
+    }
+    post {
+        request.uri.path = '/system/exit'
+    }
+}
+
 sourceSets {
     main {
         java {
@@ -37,3 +50,13 @@ sourceSets {
         }
     }
 }
+
+distributions {
+    main {
+        contents {
+            from {
+                'src/main/resources/starter.eraser'
+            }
+        }
+    }
+}
diff --git a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java
index 8971c806aa0331123e83720ad8906f2214d57faa..b263fd586f95781c26345076e7a3132472f20eab 100644
--- a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java
+++ b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/EraserStarter.java
@@ -8,26 +8,30 @@ import de.tudresden.inf.st.eraser.feedbackloop.api.Analyze;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Execute;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Plan;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.feedbackloop.execute.ExecuteImpl;
+import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerHelper;
 import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerImpl;
 import de.tudresden.inf.st.eraser.feedbackloop.plan.PlanImpl;
-import de.tudresden.inf.st.eraser.jastadd.model.DummyMachineLearningModel;
-import de.tudresden.inf.st.eraser.jastadd.model.InfluxAdapter;
-import de.tudresden.inf.st.eraser.jastadd.model.NeuralNetworkRoot;
-import de.tudresden.inf.st.eraser.jastadd.model.Root;
+import de.tudresden.inf.st.eraser.jastadd.model.*;
 import de.tudresden.inf.st.eraser.openhab2.OpenHab2Importer;
 import de.tudresden.inf.st.eraser.openhab2.mqtt.MQTTUpdater;
 import de.tudresden.inf.st.eraser.spark.Application;
 import de.tudresden.inf.st.eraser.util.JavaUtils;
 import de.tudresden.inf.st.eraser.util.ParserUtils;
+import net.sourceforge.argparse4j.ArgumentParsers;
+import net.sourceforge.argparse4j.annotation.Arg;
+import net.sourceforge.argparse4j.inf.ArgumentParser;
+import net.sourceforge.argparse4j.inf.ArgumentParserException;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Collections;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.Lock;
@@ -44,13 +48,30 @@ import java.util.concurrent.locks.ReentrantLock;
  */
 public class EraserStarter {
 
+  private static class CommandLineOptions {
+    @Arg(dest = "config_file")
+    String configFile;
+  }
+
   private static final Logger logger = LogManager.getLogger(EraserStarter.class);
 
   @SuppressWarnings("ResultOfMethodCallIgnored")
   public static void main(String[] args) {
-    logger.info("Starting ERASER");
+    ArgumentParser parser = ArgumentParsers.newFor("eraser").build()
+        .defaultHelp(true)
+        .description("Starts the knowledge base of OpenLicht");
+    parser.addArgument("-f", "--config-file")
+        .help("Path to the configuration YAML file")
+        .setDefault("starter-setting.yaml");
+    CommandLineOptions commandLineOptions = new CommandLineOptions();
+    try {
+      parser.parseArgs(args, commandLineOptions);
+    } catch (ArgumentParserException e) {
+      parser.handleError(e);
+      System.exit(1);
+    }
     ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
-    File settingsFile = new File("starter-setting.yaml");
+    File settingsFile = new File(commandLineOptions.configFile);
     Setting settings;
     try {
       settings = mapper.readValue(settingsFile, Setting.class);
@@ -60,14 +81,17 @@ public class EraserStarter {
       System.exit(1);
       return;
     }
+    logger.info("Starting ERASER");
     boolean startRest = settings.rest.use;
 
-    Root model;
+    Root root;
+    OpenHAB2Model model;
     switch (settings.initModelWith) {
       case openhab:
         OpenHab2Importer importer = new OpenHab2Importer();
         try {
           model = importer.importFrom(new URL(settings.openhab.url));
+          root = model.getRoot();
         } catch (MalformedURLException e) {
           logger.error("Could not parse URL {}", settings.openhab.url);
           logger.catching(e);
@@ -79,7 +103,8 @@ public class EraserStarter {
       case load:
       default:
         try {
-          model = ParserUtils.load(settings.load.file, EraserStarter.class);
+          root = ParserUtils.load(settings.load.realURL());
+          model = root.getOpenHAB2Model();
         } catch (IOException | Parser.Exception e) {
           logger.error("Problems parsing the given file {}", settings.load.file);
           logger.catching(e);
@@ -89,9 +114,10 @@ public class EraserStarter {
     }
 
     // initialize activity recognition
+    MachineLearningRoot machineLearningRoot = root.getMachineLearningRoot();
     if (settings.activity.dummy) {
       logger.info("Using dummy activity recognition");
-      model.getMachineLearningRoot().setActivityRecognition(DummyMachineLearningModel.createDefault());
+      machineLearningRoot.setActivityRecognition(DummyMachineLearningModel.createDefault());
     } else {
       logger.error("Reading activity recognition from file is not supported yet!");
       // TODO
@@ -100,30 +126,46 @@ public class EraserStarter {
     // initialize preference learning
     if (settings.preference.dummy) {
       logger.info("Using dummy preference learning");
-      model.getMachineLearningRoot().setPreferenceLearning(DummyMachineLearningModel.createDefault());
+      machineLearningRoot.setPreferenceLearning(DummyMachineLearningModel.createDefault());
     } else {
       logger.info("Reading preference learning from file {}", settings.preference.file);
       Learner learner = new LearnerImpl();
       // there should be a method to load a model using an URL
-      Model preference = learner.getTrainedModel(settings.preference.realURL(), settings.preference.id);
-      NeuralNetworkRoot neuralNetwork = LearnerHelper.transform(preference);
-      if (neuralNetwork == null) {
-        logger.error("Could not create preference model, see possible previous errors.");
+      boolean loadingSuccessful = false;
+      try (InputStream input = settings.preference.realURL().openStream()) {
+        loadingSuccessful = learner.loadModelFromFile(input, settings.preference.id,
+            Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
+      } catch (IOException e) {
+        logger.catching(e);
+        loadingSuccessful = false;
+      }
+//      Model preference = learner.getTrainedModel(settings.preference.realURL(), settings.preference.id);
+      logger.debug("Loading of {} was {}", settings.preference.realURL(), loadingSuccessful ? "successful" : "failed");
+      if (loadingSuccessful) {
+        EncogModel preference = learner.getTrainedModel(settings.preference.id);
+        NeuralNetworkRoot neuralNetwork = LearnerHelper.transform(preference);
+        if (neuralNetwork == null) {
+          logger.error("Could not create preference model, see possible previous errors.");
+        } else {
+          machineLearningRoot.setPreferenceLearning(neuralNetwork);
+          neuralNetwork.setOutputApplication(zeroToThree -> 33 * zeroToThree);
+          JavaUtils.ifPresentOrElse(
+              model.resolveItem(settings.preference.affectedItem),
+              item -> neuralNetwork.getOutputLayer().setAffectedItem(item),
+              () -> logger.error("Output item not set from value '{}'", settings.preference.affectedItem));
+        }
       } else {
-        model.getMachineLearningRoot().setPreferenceLearning(neuralNetwork);
-        neuralNetwork.connectItems(settings.preference.items);
-        neuralNetwork.setOutputApplication(zeroToThree -> 33 * zeroToThree);
-        JavaUtils.ifPresentOrElse(
-            model.resolveItem(settings.preference.affectedItem),
-            item -> neuralNetwork.getOutputLayer().setAffectedItem(item),
-            () -> logger.error("Output item not set from value '{}'", settings.preference.affectedItem));
+        // loading was not successful
+        logger.warn("Falling back to dummy preference learning");
+        machineLearningRoot.setPreferenceLearning(DummyMachineLearningModel.createDefault());
       }
     }
-    if (!model.getMachineLearningRoot().getActivityRecognition().check()) {
+    machineLearningRoot.getPreferenceLearning().connectItems(settings.preference.items);
+    if (!machineLearningRoot.getActivityRecognition().check()) {
       logger.fatal("Invalid activity recognition!");
       System.exit(1);
     }
-    if (!model.getMachineLearningRoot().getPreferenceLearning().check()) {
+    if (!machineLearningRoot.getPreferenceLearning().check()) {
       logger.fatal("Invalid preference learning!");
       System.exit(1);
     }
@@ -142,9 +184,9 @@ public class EraserStarter {
       analyze.setPlan(plan);
       plan.setExecute(execute);
 
-      analyze.setKnowledgeBase(model);
-      plan.setKnowledgeBase(model);
-      execute.setKnowledgeBase(model);
+      analyze.setKnowledgeBase(root);
+      plan.setKnowledgeBase(root);
+      execute.setKnowledgeBase(root);
 
       analyze.startAsThread(1, TimeUnit.SECONDS);
     } else {
@@ -154,7 +196,7 @@ public class EraserStarter {
     if (settings.mqttUpdate) {
       logger.info("Starting MQTT updater");
       Thread t = new Thread(() -> {
-        try (MQTTUpdater updater = new MQTTUpdater(model)) {
+        try (MQTTUpdater updater = new MQTTUpdater(root)) {
           updater.start();
           updater.waitUntilReady(5, TimeUnit.SECONDS);
           lock.lock();
@@ -174,7 +216,7 @@ public class EraserStarter {
       // start REST-API in new thread
       logger.info("Starting REST server");
       Thread t = new Thread(
-          () -> Application.start(settings.rest.port, model, settings.rest.createDummyMLData, lock, quitCondition),
+          () -> Application.start(settings.rest.port, root, settings.rest.createDummyMLData, lock, quitCondition),
           "REST-API");
       t.setDaemon(true);
       t.start();
@@ -208,7 +250,7 @@ public class EraserStarter {
     if (analyze != null) {
       analyze.stop();
     }
-    InfluxAdapter influxAdapter = model.getInfluxRoot().influxAdapter();
+    InfluxAdapter influxAdapter = root.getInfluxRoot().influxAdapter();
     if (influxAdapter != null) {
       try {
         influxAdapter.close();
diff --git a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/LearnerHelper.java b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/LearnerHelper.java
deleted file mode 100644
index 34404713f0e3abc32cf87ebca43db783c9dc3288..0000000000000000000000000000000000000000
--- a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/LearnerHelper.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package de.tudresden.inf.st.eraser.starter;
-
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
-import de.tudresden.inf.st.eraser.jastadd.model.*;
-import org.apache.commons.math3.stat.StatUtils;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-
-/**
- * Transformation of a {@link Model} into a {@link MachineLearningModel}.
- *
- * @author rschoene - Initial contribution
- */
-class LearnerHelper {
-
-  private static final Logger logger = LogManager.getLogger(LearnerHelper.class);
-
-  // Activation Functions
-  private static DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
-  private static DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
-  private static DoubleArrayDoubleFunction function_one = inputs -> 1.0;
-
-  static NeuralNetworkRoot transform(Model model) {
-    NeuralNetworkRoot result = NeuralNetworkRoot.createEmpty();
-    ArrayList<Double> weights = model.getWeights();
-
-    // inputs
-    int inputSum = model.getInputLayerNumber() + model.getInputBias();
-    for (int i = 0; i < inputSum; ++i) {
-      InputNeuron inputNeuron = new InputNeuron();
-      result.addInputNeuron(inputNeuron);
-    }
-    InputNeuron bias = result.getInputNeuron(model.getInputBias());
-
-    OutputLayer outputLayer = new OutputLayer();
-    // output layer
-    for (int i = 0; i < model.getOutputLayerNumber(); ++i) {
-      OutputNeuron outputNeuron = new OutputNeuron();
-      setActivationFunction(outputNeuron, model.getOutputActivationFunction());
-      outputLayer.addOutputNeuron(outputNeuron);
-    }
-    result.setOutputLayer(outputLayer);
-
-    // hidden layer
-    int hiddenSum = model.gethiddenLayerNumber() + model.getHiddenBias();
-    HiddenNeuron[] hiddenNeurons = new HiddenNeuron[hiddenSum];
-    for (int i = 0; i < (hiddenNeurons.length); i++) {
-      if (i == model.gethiddenLayerNumber()) {
-        HiddenNeuron hiddenNeuron = new HiddenNeuron();
-        hiddenNeuron.setActivationFormula(function_one);
-        hiddenNeurons[i] = hiddenNeuron;
-        result.addHiddenNeuron(hiddenNeuron);
-        bias.connectTo(hiddenNeuron, 1.0);
-        for (int out = 0; out < outputLayer.getNumOutputNeuron(); out++) {
-          hiddenNeuron.connectTo(outputLayer.getOutputNeuron(out), weights.get(i + hiddenSum * out));
-        }
-      } else {
-        HiddenNeuron hiddenNeuron = new HiddenNeuron();
-        setActivationFunction(hiddenNeuron, model.getHiddenActivationFunction());
-        hiddenNeurons[i] = hiddenNeuron;
-        result.addHiddenNeuron(hiddenNeuron);
-        for (int in = 0; in < inputSum; in++) {
-          // TODO replace 4 and 5 with model-attributes
-          result.getInputNeuron(in).connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + in) + i * 5));
-        }
-        for (int out = 0; out < outputLayer.getNumOutputNeuron(); out++) {
-          hiddenNeuron.connectTo(outputLayer.getOutputNeuron(out), weights.get(i + hiddenSum * out));
-        }
-      }
-    }
-    outputLayer.setCombinator(LearnerHelper::predictor);
-    logger.info("Created model with {} input, {} hidden and {} output neurons",
-        result.getNumInputNeuron(), result.getNumHiddenNeuron(), result.getOutputLayer().getNumOutputNeuron());
-    return result;
-  }
-
-  private static void setActivationFunction(HiddenNeuron neuron, String functionName) {
-    switch (functionName) {
-      case "ActivationTANH": neuron.setActivationFormula(tanh); break;
-      case "ActivationLinear": neuron.setActivationFormula(function_one);
-      case "ActivationSigmoid": neuron.setActivationFormula(sigmoid); break;
-      default: throw new IllegalArgumentException("Unknown function " + functionName);
-    }
-  }
-
-  private static double predictor(double[] inputs) {
-    int index = 0;
-    double maxInput = StatUtils.max(inputs);
-    for (int i = 0; i < inputs.length; i++) {
-      if (inputs[i] == maxInput) {
-        index = i;
-      }
-    }
-    //outputs from learner
-    final double[] outputs = new double[]{2.0, 1.0, 3.0, 0.0};
-    return outputs[index];
-  }
-
-}
diff --git a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/Setting.java b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/Setting.java
index fb08ee7df0463a9d2feb56bc2bf9d3e8296df791..dacbb4287bb16d2f4fcdb5ba2e2cb4b67cf6406e 100644
--- a/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/Setting.java
+++ b/eraser.starter/src/main/java/de/tudresden/inf/st/eraser/starter/Setting.java
@@ -1,6 +1,11 @@
 package de.tudresden.inf.st.eraser.starter;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.net.MalformedURLException;
 import java.net.URL;
+import java.nio.file.Paths;
 import java.util.List;
 
 /**
@@ -10,6 +15,7 @@ import java.util.List;
  */
 @SuppressWarnings("WeakerAccess")
 class Setting {
+  private final static Logger logger = LogManager.getLogger(Setting.class);
   public class Rest {
     /** Start the REST server. Default: true. */
     public boolean use = true;
@@ -20,9 +26,20 @@ class Setting {
   }
   public class FileContainer {
     public String file;
+    /** Whether the file is external (not shipped with the JAR). Default: false. */
+    public boolean external = false;
     /** Get the URL to the {@link #file} */
     URL realURL() {
-      return Setting.class.getClassLoader().getResource(file);
+      if (external) {
+        try {
+          return Paths.get(file).toUri().toURL();
+        } catch (MalformedURLException e) {
+          logger.catching(e);
+          return null;
+        }
+      } else {
+        return Setting.class.getClassLoader().getResource(file);
+      }
     }
   }
   public class MLContainer extends FileContainer {
diff --git a/eraser.starter/src/main/resources/log4j2.xml b/eraser.starter/src/main/resources/log4j2.xml
index 2ff094e32bbda4cd215ebac36f65b4394d607cad..900e8c305d176e82d8f8a3c6beb1e96acfe186a8 100644
--- a/eraser.starter/src/main/resources/log4j2.xml
+++ b/eraser.starter/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/eraser.starter/src/main/resources/starter.eraser b/eraser.starter/src/main/resources/starter.eraser
index 8d08d97431d0ea2a3a99e019b2126f1e1d04adb5..25505fe24d64d3ed08ecdc0a3d741625b25e46ad 100644
--- a/eraser.starter/src/main/resources/starter.eraser
+++ b/eraser.starter/src/main/resources/starter.eraser
@@ -1,14 +1,28 @@
-Color Item: id="iris1_item" label="Iris 1" state="121,88,68" topic="iris1_item/state";
-Number Item: id="datetime_month" label="Month" state="1" topic="datetime/month";
-Number Item: id="datetime_day" label="Day" state="31" topic="datetime/day";
-Number Item: id="datetime_hour" label="Hour" state="13" topic="datetime/hour";
-Number Item: id="datetime_minute" label="Minute" state="37" topic="datetime/minute";
+Color Item: id="iris1_item" label="Iris 1" state="121,88,68" topic="iris1_item";
+Number Item: id="datetime_month" label="Month" state="1" topic="datetime_month";
+Number Item: id="datetime_day" label="Day" state="31" topic="datetime_day";
+Number Item: id="datetime_hour" label="Hour" state="13" topic="datetime_hour";
+Number Item: id="datetime_minute" label="Minute" state="37" topic="datetime_minute";
 Number Item: id="bias" label="bias item" state="1" ;
+Activity Item: id="activity" ;
 
 Group: id="Lights" items=["iris1_item"];
-Group: id="Datetime" items=["datetime-month", "datetime-day", "datetime-hour", "datetime-minute"];
+Group: id="Datetime" items=["datetime_month", "datetime_day", "datetime_hour", "datetime_minute"];
 
-Mqtt: incoming="oh2/out/" outgoing="oh2/in/" host="localhost" ;
+Mqtt: incoming="oh2/out/" outgoing="oh2/in/" host="localhost:2883" ;
 //Mqtt: incoming="oh2/out/" outgoing="oh2/in/" host="192.168.1.250" ;
 
 Influx: host="172.22.1.152" ;
+
+ML: activities={
+  0: "Open door in empty room",
+  1: "Door closed in empty room",
+  2: "Open door with person in room",
+  3: "Door closed with person in room",
+  4: "Working",
+  5: "Watch TV",
+  6: "Reading",
+  7: "Listening to music",
+  8: "Going to sleep",
+  9: "Wake up"
+} ;
diff --git a/eraser.starter/starter-setting.yaml b/eraser.starter/starter-setting.yaml
index 01d5ec91350e51e34e2073ee7dce2bc0470f64d9..f29763c43dfa4b789f872efc9eec88439930eff9 100644
--- a/eraser.starter/starter-setting.yaml
+++ b/eraser.starter/starter-setting.yaml
@@ -9,13 +9,18 @@ rest:
   # Port of the REST server. Default: 4567.
   port: 4567
   # Add some dummy data for activities and events. Only effective when using the REST server. Default: false.
-  createDummyMLData: true
+  createDummyMLData: false
 
 # Initialize the knowledge base with a file.
 # OpenHAB synchronization is done if MQTT url is set in the processed file
 load:
   # File to read in. Expected format = eraser
-  file: starter.eraser
+#  # Option 1: Use built-in file
+#  file: starter.eraser
+#  external: false
+  # Option 2: Use external file
+  file: src/main/resources/starter.eraser
+  external: true
 
 # Model for activity recognition. If dummy is true, then the file parameter is ignored.
 activity:
@@ -40,7 +45,7 @@ preference:
     - datetime_day
     - datetime_hour
     - datetime_minute
-    - bias
+#    - activity
   # Item to change with classification result
   affectedItem: iris1_item
 
diff --git a/feedbackloop.analyze/build.gradle b/feedbackloop.analyze/build.gradle
index b59641cfc6d47d4dac802ed0387bdd38806b3004..077136f33bd04ccde2acb2cce23df7ba22c69c7a 100644
--- a/feedbackloop.analyze/build.gradle
+++ b/feedbackloop.analyze/build.gradle
@@ -9,11 +9,6 @@ apply plugin: 'java'
 dependencies {
     compile project(':eraser-base')
     compile project(':feedbackloop.api')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 sourceSets {
diff --git a/feedbackloop.analyze/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/analyze/AnalyzeImpl.java b/feedbackloop.analyze/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/analyze/AnalyzeImpl.java
index f58867a0ade42cba5c3da2ac4668ece0ee56bf60..5e9f8cfb5476ae25219cebd704e17e2e2b78cb9d 100644
--- a/feedbackloop.analyze/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/analyze/AnalyzeImpl.java
+++ b/feedbackloop.analyze/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/analyze/AnalyzeImpl.java
@@ -45,7 +45,11 @@ public class AnalyzeImpl implements Analyze {
         // new! inform plan!
         logger.info("Found new activity '{}'", activity.getLabel());
         mostRecentActivity = activity;
-        informPlan(activity);
+        try {
+          informPlan(activity);
+        } catch (Exception e) {
+          logger.catching(e);
+        }
       }
     });
   }
diff --git a/feedbackloop.analyze/src/main/resources/log4j2.xml b/feedbackloop.analyze/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.analyze/src/main/resources/log4j2.xml
+++ b/feedbackloop.analyze/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/feedbackloop.api/build.gradle b/feedbackloop.api/build.gradle
index 9403a7197373bc6fcaa2e68821261fbc264ee6d4..0039b3d515db9d781cbbdc37c25ab42fa23fa855 100644
--- a/feedbackloop.api/build.gradle
+++ b/feedbackloop.api/build.gradle
@@ -1,20 +1,18 @@
+plugins {
+    id 'java'
+    id 'io.franzbecker.gradle-lombok' version '3.0.0'
+}
+
 repositories {
     mavenCentral()
 }
 
 sourceCompatibility = 1.8
 
-apply plugin: 'java'
-
 dependencies {
     compile project(':eraser-base')
-    compile project(':commons.color:')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
-    compile 'org.encog:encog-core:3.4'
+    compile project(':commons.color')
+    compile group: 'org.encog', name: 'encog-core', version: '3.4'
 }
 
 sourceSets {
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/EncogModel.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/EncogModel.java
new file mode 100644
index 0000000000000000000000000000000000000000..c0bc0336ea49e810ac5930c30600e271e65c6861
--- /dev/null
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/EncogModel.java
@@ -0,0 +1,44 @@
+package de.tudresden.inf.st.eraser.feedbackloop.api;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.encog.neural.networks.layers.Layer;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * 
+ * This class represents an object that contains all information about a trained neural network.
+ * 
+ * For now: weights and the {@link Layer}s
+ * 
+ * @author Bierzyns - initial contribution
+ * */
+@Getter
+@Setter
+public class EncogModel {
+	/**
+	 * todo
+	 */
+	private String modelType;
+	private List<Double> weights;
+	private List<Layer> layers;
+
+	public EncogModel(String model) {
+		modelType = model;
+	}
+
+	public Layer getInputLayer() {
+		return Objects.requireNonNull(layers, "Layers not set yet").get(0);
+	}
+
+	public Layer getOutputLayer() {
+		return Objects.requireNonNull(layers, "Layers not set yet").get(layers.size() - 1);
+	}
+
+	public List<Layer> getHiddenLayers() {
+		return Objects.requireNonNull(layers, "Layers not set yet").subList(1, layers.size() - 1);
+	}
+}
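A short usage sketch of the new container, assuming a simple three-layer network; the layer sizes, activation functions and weight values below are illustrative and not taken from a trained network:

    import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
    import org.encog.engine.network.activation.ActivationSigmoid;
    import org.encog.neural.networks.layers.BasicLayer;
    import org.encog.neural.networks.layers.Layer;

    import java.util.Arrays;
    import java.util.List;

    class EncogModelSketch {
      public static void main(String[] args) {
        EncogModel model = new EncogModel("NN");
        // input, one hidden and one output layer; Lombok generates setLayers/setWeights
        List<Layer> layers = Arrays.asList(
            new BasicLayer(new ActivationSigmoid(), true, 4),
            new BasicLayer(new ActivationSigmoid(), true, 7),
            new BasicLayer(new ActivationSigmoid(), false, 1));
        model.setLayers(layers);
        model.setWeights(Arrays.asList(0.1, 0.2, 0.3)); // placeholder weights
        System.out.println(model.getHiddenLayers().size());          // 1
        System.out.println(model.getInputLayer() == layers.get(0));  // true
        System.out.println(model.getOutputLayer() == layers.get(2)); // true
      }
    }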
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java
index 5ee02c57f3ff5a1353c0a50d5f94055e5b9eabe4..a08a6fc2a444b4a30fd4f234acfd4449c075d444 100644
--- a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java
+++ b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/Learner.java
@@ -1,15 +1,18 @@
 package de.tudresden.inf.st.eraser.feedbackloop.api;
 
+import java.io.File;
+import java.io.InputStream;
 import java.net.URL;
-import java.util.ArrayList;
+import java.util.List;
+
+import org.encog.util.arrayutil.NormalizedField;
 
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
 import de.tudresden.inf.st.eraser.jastadd.model.Root;
 
 /**
  * 
- * Learner which handles the training and retraining of algorithms 
- * and models used for activity recognition, preference learning and data pre-processing. 
+ * Learner which handles the training and retraining of neural networks 
+ * and models used for activity recognition and preference learning.
  * 
  * @author Bierzyns - initial contribution
  * 
@@ -19,50 +22,114 @@ public interface Learner {
 	void setKnowledgeBase(Root knowledgeBase);
 	
 	/**
-	 * Method for loading data set, which used for initial training. This method exists only for development purposes. 
+	 * Method for loading a data set used for initial training. This method exists only for development purposes. The csvFolderPath
+	 * variable should be set to ensure that the data is read from the correct place.
 	 * 
-	 * @param dataSetName - name of data set that is loaded from the data set folder e.g. data1.arff (Weka)
+	 * @param dataSetName - name of the data set that is loaded from the data set folder, e.g. data1.csv
+	 * @param targetColumns - numbers of the columns that contain the labels of the corresponding csv data set
 	 * @param modelID - ID of the model that will be trained with this data set
 	 * @return true - data set loading was successful
 	 * */
-	boolean loadDataSet(String dataSetName, int modelID);
+	boolean loadDataSet(String dataSetName, List<Integer> targetColumns, int modelID);
+
+	/**
+	 * Method for loading a neural network from a file.
+	 * Please note that the normalizers are not loaded from the file, because it is assumed that the mins and maxes are saved anyway in the meta data of the data sets or items.
+	 *
+	 * @param file       file to load the model from
+	 * @param modelID - ID of the BasicNetwork.
+	 * @param inputMaxes - list that contains max values of all input columns (sensors) e.g. light intensity 100
+	 * @param inputMins - list that contains min values of all input columns (sensors) e.g. light intensity 0
+	 * @param targetMaxes - list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+	 * @param targetMins - list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+	 * @return true - model loading was successful
+	 * */
+	boolean loadModelFromFile(File file, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+	                          List<Integer> targetMins);
+
+	/**
+     * Method for loading a neural network from an input stream.
+     * Please note that the normalizers are not loaded from the stream, because it is assumed that the mins and maxes are saved anyway in the meta data of the data sets or items.
+     *
+     * @param input - stream to load the model from
+     * @param modelID - ID of the BasicNetwork.
+     * @param inputMaxes - list that contains max values of all input columns (sensors) e.g. light intensity 100
+     * @param inputMins - list that contains min values of all input columns (sensors) e.g. light intensity 0
+     * @param targetMaxes - list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+     * @param targetMins - list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+     * @return true - model loading was successful
+     * */
+	boolean loadModelFromFile(InputStream input, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+	                          List<Integer> targetMins);
 	
 	/**
 	 * Method for the initial training of algorithms and models. That uses external data set for training.
 	 * 
-	 * @param modelID - ID of model that will be trained
+	 * @param inputCount - number of neurons in the input layer [here to simplify code reading]
+     * @param outputCount - number of neurons in the output layer [here to simplify code reading]
+     * @param hiddenCount - number of hidden layers in the network
+     * @param hiddenNeuronCount - number of neurons in each hidden layer (currently the same for all hidden layers)
+     * @param modelID - ID of the BasicNetwork.
+     * @param inputMaxes - list that contains max values of all input columns (sensors) e.g. light intensity 100
+     * @param inputMins - list that contains min values of all input columns (sensors) e.g. light intensity 0
+     * @param targetMaxes - list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+     * @param targetMins - list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+     * 
 	 * @return true - training of the model was successful started 
 	 * */
-	boolean train(int modelID);
+	boolean train(int inputCount, int outputCount, int hiddenCount, int hiddenNeuronCount, int modelID,
+            List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+            List<Integer> targetMins);
 	
 	/**
-	 * Method for the initial training of algorithms and models. That uses data set provided by Knowledge Base. Format of Data?
+	 * Method for the initial training of algorithms and models, using a data set provided by the Knowledge Base.
 	 * 
-	 * @param set - data set on which the training is based on
-	 * @param modelID - ID of model that will be trained
+	 * @param data - data set on which the training is based
+	 * @param inputCount - number of neurons in the input layer [here to simplify code reading]
+     * @param outputCount - number of neurons in the output layer [here to simplify code reading]
+     * @param hiddenCount - number of hidden layers in the network
+     * @param hiddenNeuronCount - number of neurons in each hidden layer (currently the same for all hidden layers)
+     * @param modelID - ID of the BasicNetwork.
+     * @param inputMaxes - list that contains max values of all input columns (sensors) e.g. light intensity 100
+     * @param inputMins - list that contains min values of all input columns (sensors) e.g. light intensity 0
+     * @param targetMaxes - list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+     * @param targetMins - list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
 	 * @return true - training of the model was successful started 
 	 * */
-	boolean train(ArrayList<Integer> set, int modelID);
+	boolean train(double[][] data, int inputCount, int outputCount, int hiddenCount, int hiddenNeuronCount, int modelID,
+            List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+            List<Integer> targetMins, List<Integer> targetColumns);
 	
 	/**
-	 * Method for adapting existing models. Format of new data?
+	 * Method for adapting existing models.
 	 * 
-	 * @param set - data set on which the retraining is based on
+	 * @param data - data set on which the retraining is based
+	 * @param targetColumns - numbers of the columns that contain the labels/targets
 	 * @param modelID - ID of the model that will be retrained
 	 * @return true - retraining of model was started successfully
 	 * */
-	boolean reTrain(ArrayList<Integer> set, int modelID);
+	boolean reTrain(double[][] data, List<Integer> targetColumns, int modelID);
 	
 	/**
 	 * Method for getting the information about an trained model.
 	 * 
-	 *  @param modelID - ID of the model of which information are requested from
-	 *  @return Model - Object that contains the information of the requested model  
+	 * @param modelID - ID of the model whose information is requested
+	 * @return EncogModel - object that contains the information of the requested model
 	 * */
-	Model getTrainedModel(int modelID);
-
-	Model getTrainedModel(URL url, int modelID);
+	EncogModel getTrainedModel(int modelID);
 
-	ArrayList<Double> getNormalizer(int modelID, ArrayList<Integer> list);
+	@Deprecated
+	EncogModel getTrainedModel(URL url, int modelID);
+	
+	/**
+	 * 
+	 * Method for getting the normalizer of a model for a specific column/input.
+	 * 
+	 * @param modelID - ID of the model whose normalizer is requested
+	 * @param columnNr - number of the column the normalizer is used for
+	 * 
+	 * @return {@link NormalizedField} - the normalizer
+	 * */
+	NormalizedField getNormalizerInput(int modelID, int columnNr);
 
 }
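A hedged sketch of how the reworked stream-based loading could be used by a client of this interface; the resource name activity.eg and the min/max ranges are assumptions chosen for illustration:

    import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
    import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
    import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerImpl;

    import java.io.InputStream;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    class LoadPretrainedSketch {
      public static void main(String[] args) {
        Learner learner = new LearnerImpl();
        // month/day/hour/minute ranges and the activity label range are illustrative values
        List<Integer> inputMins = Arrays.asList(1, 1, 0, 0);
        List<Integer> inputMaxes = Arrays.asList(12, 31, 23, 59);
        List<Integer> targetMins = Collections.singletonList(0);
        List<Integer> targetMaxes = Collections.singletonList(3);
        // "activity.eg" is a hypothetical bundled Encog model file
        InputStream model = LoadPretrainedSketch.class.getClassLoader().getResourceAsStream("activity.eg");
        learner.loadModelFromFile(model, 1, inputMaxes, inputMins, targetMaxes, targetMins);
        EncogModel trained = learner.getTrainedModel(1);
        System.out.println("weights: " + trained.getWeights().size());
      }
    }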
diff --git a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java b/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java
deleted file mode 100644
index 2b24c4267addb456b63eee9afc16b061c738c9c9..0000000000000000000000000000000000000000
--- a/feedbackloop.api/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/api/model/Model.java
+++ /dev/null
@@ -1,106 +0,0 @@
-package de.tudresden.inf.st.eraser.feedbackloop.api.model;
-
-import java.text.Normalizer;
-import java.util.ArrayList;
-import org.encog.ml.data.versatile.NormalizationHelper;
-
-/**
- * 
- * This class represents an object that contains all information of a trained model.
- * 
- * @author Bierzyns - initial contribution
- * */
-public class Model {
-	/**
-	 * todo
-	 */
-
-	private String modelType;
-	private ArrayList weights;
-
-	private Integer inputLayerNumber;
-	private Integer hiddenLayerNumber;
-	private Integer outputLayerNumber;
-
-	private String inputActivationFunction;
-	private String hiddenActivationFunction;
-	private String outputActivationFunction;
-
-	private Integer inputBiasNumber;
-	private Integer hiddenBiasNumber;
-	private NormalizationHelper helper;
-	
-	public Model(String model) {
-
-		modelType=model;
-	}
-	/**
-	 * Getter for model type.
-	 * 
-	 * @return modelType - Possible results: NN or DT
-	 * */
-
-	public String getModelType(){return this.modelType;}
-
-	public ArrayList<Double> getWeights(){
-		return this.weights;
-	}
-
-	public void setWeights(ArrayList<Double> weights){
-		this.weights=weights;
-	}
-
-	public void setInputLayerNumber(int Number){this.inputLayerNumber=Number;}
-	public int getInputLayerNumber(){
-		return this.inputLayerNumber;
-	}
-
-	public void setHiddenLayerNumber(int Number){this.hiddenLayerNumber=Number;}
-	public int gethiddenLayerNumber(){
-		return this.hiddenLayerNumber;
-	}
-
-	public void setOutputLayerNumber(int Number){this.outputLayerNumber=Number;}
-	public int getOutputLayerNumber(){
-		return this.outputLayerNumber;
-	}
-	public void setInputActivationFunction(String function){
-		this.inputActivationFunction=function;
-	}
-	public String getInputActivationFunction(){
-		return this.inputActivationFunction;
-	}
-	public void setHiddenActivationFunction(String function){
-		this.hiddenActivationFunction=function;
-	}
-	public String getHiddenActivationFunction(){
-		return this.hiddenActivationFunction;
-	}
-	public void setOutputActivationFunction(String function){
-		this.outputActivationFunction=function;
-	}
-	public String getOutputActivationFunction(){
-		return this.outputActivationFunction;
-	}
-
-	public void setInputBias(int Number){
-		this.inputBiasNumber=Number;
-	}
-	public int getInputBias(){
-		return this.inputBiasNumber;
-	}
-
-	public void setHiddenBias(int Number){
-		this.hiddenBiasNumber=Number;
-	}
-
-	public int getHiddenBias(){
-		return this.hiddenBiasNumber;
-	}
-	//public void setNormalizer(NormalizationHelper helper){
-		//this.helper=helper;
-	//}
-	//public NormalizationHelper getNormalizer(){
-		//return this.helper;
-	//}
-}
diff --git a/feedbackloop.api/src/main/resources/log4j2.xml b/feedbackloop.api/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.api/src/main/resources/log4j2.xml
+++ b/feedbackloop.api/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/feedbackloop.execute/build.gradle b/feedbackloop.execute/build.gradle
index b59641cfc6d47d4dac802ed0387bdd38806b3004..077136f33bd04ccde2acb2cce23df7ba22c69c7a 100644
--- a/feedbackloop.execute/build.gradle
+++ b/feedbackloop.execute/build.gradle
@@ -9,11 +9,6 @@ apply plugin: 'java'
 dependencies {
     compile project(':eraser-base')
     compile project(':feedbackloop.api')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 sourceSets {
diff --git a/feedbackloop.execute/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImpl.java b/feedbackloop.execute/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImpl.java
index 1fa433ad425df55e2d9c44b66c396786cddea84e..80d2346312445914306e6c224d1f9de1b766cf6d 100644
--- a/feedbackloop.execute/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImpl.java
+++ b/feedbackloop.execute/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImpl.java
@@ -61,7 +61,7 @@ public class ExecuteImpl implements Execute {
   }
 
   private void resolveOrLogError(String itemId, Consumer<? super Item> consumer) {
-    Optional<Item> optionalItem = knowledgeBase.resolveItem(itemId);
+    Optional<Item> optionalItem = knowledgeBase.getOpenHAB2Model().resolveItem(itemId);
     if (!optionalItem.isPresent()) {
       logger.warn("Could not resolve '{}' as an item.", itemId);
     }
diff --git a/feedbackloop.execute/src/main/resources/log4j2.xml b/feedbackloop.execute/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.execute/src/main/resources/log4j2.xml
+++ b/feedbackloop.execute/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/feedbackloop.execute/src/test/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImplTest.java b/feedbackloop.execute/src/test/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImplTest.java
index 66823f1459c45fccc007815b3004527b62159be6..f3f79ac72847b1c8c251c96a6d011014479f7115 100644
--- a/feedbackloop.execute/src/test/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImplTest.java
+++ b/feedbackloop.execute/src/test/java/de/tudresden/inf/st/eraser/feedbackloop/execute/ExecuteImplTest.java
@@ -21,7 +21,7 @@ public class ExecuteImplTest {
   @Test
   public void testColorControlledByOneNumber() {
     ModelAndItem mai = TestUtils.createModelAndItem(0);
-    Root model = mai.model;
+    OpenHAB2Model model = mai.model;
     NumberItem numberItem = mai.item;
 
     ColorItem lamp = new ColorItem();
@@ -33,7 +33,7 @@ public class ExecuteImplTest {
     numberItem.addControlling(lamp);
 
     Execute execute = new ExecuteImpl();
-    execute.setKnowledgeBase(model);
+    execute.setKnowledgeBase(model.getRoot());
 
     Assert.assertEquals(0, numberItem.getState(), DELTA);
     Assert.assertEquals(TupleHSB.of(0, 0, 0), lamp.getState());
@@ -47,7 +47,7 @@ public class ExecuteImplTest {
 
   @Test
   public void testColorControlledByOneBoolean() {
-    Root model = TestUtils.createModelAndItem(0).model;
+    OpenHAB2Model model = TestUtils.createModelAndItem(0).model;
 
     ItemWithBooleanState button = new SwitchItem();
     button.setID("button");
@@ -64,7 +64,7 @@ public class ExecuteImplTest {
     button.addControlling(lamp);
 
     Execute execute = new ExecuteImpl();
-    execute.setKnowledgeBase(model);
+    execute.setKnowledgeBase(model.getRoot());
 
     Assert.assertFalse(button.getState());
     Assert.assertEquals(TupleHSB.of(0, 0, 0), lamp.getState());
@@ -79,7 +79,7 @@ public class ExecuteImplTest {
   @Test
   public void testColorControlledByMany() {
     ModelAndItem mai = TestUtils.createModelAndItem(0);
-    Root model = mai.model;
+    OpenHAB2Model model = mai.model;
     NumberItem numberItem = mai.item;
 
     Group g = TestUtils.getDefaultGroup(model);
@@ -108,7 +108,7 @@ public class ExecuteImplTest {
     lamp.addControlledBy(colorItem);
 
     Execute execute = new ExecuteImpl();
-    execute.setKnowledgeBase(model);
+    execute.setKnowledgeBase(model.getRoot());
 
     Assert.assertEquals(0, numberItem.getState(), DELTA);
     Assert.assertEquals("0", stringItem.getState());
diff --git a/feedbackloop.learner/build.gradle b/feedbackloop.learner/build.gradle
index c3303f2d9f2835666f08b8ee7748a9c8d100e7eb..ea041e50cc38e90c333527bf039567860b29f7a8 100644
--- a/feedbackloop.learner/build.gradle
+++ b/feedbackloop.learner/build.gradle
@@ -10,12 +10,7 @@ apply plugin: 'application'
 dependencies {
     compile project(':eraser-base')
     compile project(':feedbackloop.api')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
-    compile 'org.encog:encog-core:3.4'
+    compile group: 'org.encog', name: 'encog-core', version: '3.4'
 }
 
 run {
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Dataset.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Dataset.java
new file mode 100644
index 0000000000000000000000000000000000000000..0b09cbd2c73a753fa2de5b74b8bbe509e8e75a36
--- /dev/null
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Dataset.java
@@ -0,0 +1,36 @@
+package de.tudresden.inf.st.eraser.feedbackloop.learner;
+
+import org.encog.util.csv.ReadCSV;
+
+import java.util.List;
+
+/**
+ * This class is a representation of a data set in csv format.
+ * The data is saved and made accessible by {@link ReadCSV}, and the numbers of the target columns are saved as meta-data.
+ * 
+ *  @author Bierzyns - initial contribution
+ * 
+ * */
+public class Dataset {
+
+    private final ReadCSV csv;
+    private final List<Integer> targetColumns;
+
+    public Dataset(ReadCSV csv, List<Integer> targetColumns) {
+        this.csv = csv;
+        this.targetColumns = targetColumns;
+    }
+
+    public int getColumnCount() {
+        return csv.getColumnCount();
+    }
+
+    public List<Integer> getTargetColumns() {
+        return targetColumns;
+    }
+
+    public ReadCSV getCsv() {
+        return csv;
+    }
+
+}
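A small sketch of wrapping a CSV file into the new Dataset; the file path and the target column number are assumptions, while the decimal-point/comma format mirrors the one used in LearnerImpl:

    import de.tudresden.inf.st.eraser.feedbackloop.learner.Dataset;
    import org.encog.util.csv.CSVFormat;
    import org.encog.util.csv.ReadCSV;

    import java.io.File;
    import java.util.Collections;

    class DatasetSketch {
      public static void main(String[] args) {
        File csvFile = new File("src/main/resources/initial_data.csv"); // illustrative path
        ReadCSV csv = new ReadCSV(csvFile.getAbsoluteFile(), false, new CSVFormat('.', ','));
        Dataset set = new Dataset(csv, Collections.singletonList(4)); // label assumed in column 4
        int rows = 0;
        while (set.getCsv().next()) {
          rows++; // each next() advances to the following data row
        }
        System.out.println(rows + " rows, target columns: " + set.getTargetColumns());
      }
    }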
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerHelper.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerHelper.java
new file mode 100644
index 0000000000000000000000000000000000000000..f23b58105b78ea6b84a3f11211e843eb28f2cf13
--- /dev/null
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerHelper.java
@@ -0,0 +1,150 @@
+package de.tudresden.inf.st.eraser.feedbackloop.learner;
+
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
+import de.tudresden.inf.st.eraser.jastadd.model.*;
+import org.apache.commons.math3.stat.StatUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.encog.engine.network.activation.ActivationFunction;
+import org.encog.engine.network.activation.ActivationLinear;
+import org.encog.engine.network.activation.ActivationSigmoid;
+import org.encog.engine.network.activation.ActivationTANH;
+import org.encog.neural.networks.layers.Layer;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Transformation of a {@link EncogModel} into a {@link NeuralNetworkRoot}.
+ *
+ * @author rschoene - Initial contribution
+ */
+public class LearnerHelper {
+
+  private static final Logger logger = LogManager.getLogger(LearnerHelper.class);
+
+  // Activation Functions
+  private static DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
+  private static DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
+  private static DoubleArrayDoubleFunction function_one = inputs -> 1.0;
+
+  public static NeuralNetworkRoot transform(EncogModel encogModel) {
+    NeuralNetworkRoot result = NeuralNetworkRoot.createEmpty();
+    List<Double> weights = encogModel.getWeights();
+    logger.debug("Got {} weights", weights.size());
+
+    List<List<Neuron>> allNeurons = new ArrayList<>();
+    // inputs
+    Layer inputLayer = encogModel.getInputLayer();
+    reportLayer("input", inputLayer);
+    List<Neuron> inputNeurons = new ArrayList<>();
+    for (int i = 0; i < nonBiasNeuronCount(inputLayer); ++i) {
+      InputNeuron inputNeuron = new InputNeuron();
+      result.addInputNeuron(inputNeuron);
+      inputNeurons.add(inputNeuron);
+    }
+    addBiasIfNeeded(inputLayer, result.getHiddenNeuronList(), inputNeurons);
+    allNeurons.add(inputNeurons);
+
+    // hidden layer
+    List<Neuron> currentNeurons;
+    for (Layer hiddenLayer : encogModel.getHiddenLayers()) {
+      reportLayer("one hidden", hiddenLayer);
+      currentNeurons = new ArrayList<>();
+      allNeurons.add(currentNeurons);
+      for (int i = 0; i < nonBiasNeuronCount(hiddenLayer); ++i) {
+        HiddenNeuron hiddenNeuron = new HiddenNeuron();
+        setActivationFunction(hiddenNeuron, hiddenLayer.getActivationFunction());
+        result.addHiddenNeuron(hiddenNeuron);
+        currentNeurons.add(hiddenNeuron);
+      }
+      addBiasIfNeeded(hiddenLayer, result.getHiddenNeuronList(), currentNeurons);
+    }
+
+    // output layer
+    OutputLayer outputLayer = new OutputLayer();
+    Layer modelOutputLayer = encogModel.getOutputLayer();
+    reportLayer("output", modelOutputLayer);
+    List<Neuron> outputNeurons = new ArrayList<>();
+    for (int i = 0; i < nonBiasNeuronCount(modelOutputLayer); ++i) {
+      OutputNeuron outputNeuron = new OutputNeuron();
+      setActivationFunction(outputNeuron, modelOutputLayer.getActivationFunction());
+      outputLayer.addOutputNeuron(outputNeuron);
+      outputNeurons.add(outputNeuron);
+    }
+    result.setOutputLayer(outputLayer);
+    allNeurons.add(outputNeurons);
+    logger.debug("Created a total of {} neurons",
+        allNeurons.stream()
+            .map(list -> Integer.toString(list.size()))
+            .collect(Collectors.joining("+")));
+
+    // set weights from back to front, and from top to bottom
+    int weightIndex = 0;
+    for (int layer = allNeurons.size() - 1; layer > 0; --layer) {
+      List<Neuron> rightList = allNeurons.get(layer);
+      List<Neuron> leftList = allNeurons.get(layer - 1);
+      for (int rightIndex = 0; rightIndex < rightList.size(); rightIndex++) {
+        for (int leftIndex = 0; leftIndex < leftList.size(); leftIndex++) {
+          if (rightList.get(rightIndex) instanceof BiasNeuron) {
+            continue;
+          }
+          leftList.get(leftIndex).connectTo(rightList.get(rightIndex), weights.get(weightIndex++));
+        }
+      }
+    }
+    if (weightIndex != weights.size()) {
+      logger.error("Not all weights were used (only {} of {}). Loaded wrong model!", weightIndex, weights.size());
+    }
+
+    outputLayer.setCombinator(LearnerHelper::predictor);
+    logger.info("Created model with {} input, {} hidden and {} output neurons",
+        result.getNumInputNeuron(), result.getNumHiddenNeuron(), result.getOutputLayer().getNumOutputNeuron());
+    return result;
+  }
+
+  private static void addBiasIfNeeded(Layer layer, JastAddList<HiddenNeuron> neuronList, List<Neuron> localNeuronList) {
+    if (layer.hasBias()) {
+      BiasNeuron bias = new BiasNeuron();
+      neuronList.add(bias);
+      localNeuronList.add(bias);
+    }
+  }
+
+  private static int nonBiasNeuronCount(Layer layer) {
+    return layer.getNeuronCount() - (layer.hasBias() ? 1 : 0);
+  }
+
+  private static void reportLayer(String name, Layer layer) {
+    logger.debug("{} layer has {} neurons {}",
+        name, layer.getNeuronCount(), layer.hasBias() ? "(including bias)" : "");
+  }
+
+  private static void setActivationFunction(HiddenNeuron neuron, ActivationFunction function) {
+    if (function instanceof ActivationTANH) {
+      neuron.setActivationFormula(tanh);
+    } else if (function instanceof ActivationLinear) {
+      neuron.setActivationFormula(function_one);
+    } else if (function instanceof ActivationSigmoid) {
+      neuron.setActivationFormula(sigmoid);
+    } else {
+      throw new IllegalArgumentException("Unknown activation function " + function.getClass().getName());
+    }
+  }
+
+  private static double predictor(double[] inputs) {
+    int index = 0;
+    double maxInput = StatUtils.max(inputs);
+    for (int i = 0; i < inputs.length; i++) {
+      if (inputs[i] == maxInput) {
+        index = i;
+      }
+    }
+    //outputs from learner
+    final double[] outputs = new double[]{2.0, 1.0, 3.0, 0.0};
+    return outputs[index];
+  }
+
+}
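For clarity, the combinator set on the output layer picks the index of the strongest output neuron and maps it to a fixed label; a standalone sketch of that argmax-plus-lookup logic (method and variable names are illustrative):

    class PredictorSketch {
      // Same argmax-plus-lookup idea as LearnerHelper.predictor, shown standalone.
      static double predict(double[] neuronOutputs, double[] labelForIndex) {
        int index = 0;
        for (int i = 1; i < neuronOutputs.length; i++) {
          if (neuronOutputs[i] >= neuronOutputs[index]) {
            index = i; // the last occurrence of the maximum wins, as in LearnerHelper
          }
        }
        return labelForIndex[index];
      }

      public static void main(String[] args) {
        double[] labels = {2.0, 1.0, 3.0, 0.0}; // fixed label order from the learner
        // the network is most confident in neuron 2, which maps to activity label 3.0
        System.out.println(predict(new double[]{0.1, 0.2, 0.6, 0.1}, labels));
      }
    }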
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java
index c9d79dbba376b13554a881bfc9dc3fc2df0f0849..83edf6b7f195d9be6420369d4590955c42449735 100644
--- a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/LearnerImpl.java
@@ -1,335 +1,230 @@
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
 
-import java.io.*;
-import java.net.URL;
-import java.util.ArrayList;
-
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.Model;
-import de.tudresden.inf.st.eraser.jastadd.model.*;
-
-import org.apache.commons.math3.stat.StatUtils;
+import de.tudresden.inf.st.eraser.jastadd.model.Root;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.encog.Encog;
-import org.encog.ml.MLClassification;
-import org.encog.ml.data.MLData;
+import org.encog.neural.flat.FlatNetwork;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.encog.neural.networks.layers.Layer;
+import org.encog.util.arrayutil.NormalizedField;
 import org.encog.util.csv.CSVFormat;
 import org.encog.util.csv.ReadCSV;
-import org.encog.util.simple.EncogUtility;
-
-import org.encog.ml.data.versatile.NormalizationHelper;
-import org.encog.ml.data.versatile.VersatileMLDataSet;
-import org.encog.ml.data.versatile.columns.ColumnDefinition;
-import org.encog.ml.data.versatile.columns.ColumnType;
-import org.encog.ml.data.versatile.sources.VersatileDataSource;
-import org.encog.ml.data.versatile.sources.CSVDataSource;
-import org.encog.ml.factory.MLMethodFactory;
-import org.encog.ml.model.EncogModel;
-import org.encog.ConsoleStatusReportable;
-import org.encog.ml.MLRegression;
-
-import java.util.Arrays;
-import static org.encog.persist.EncogDirectoryPersistence.*;
-
 
 import java.io.File;
-import java.util.List;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.*;
 
 /**
  * Implementation of the Learner.
  *
  * @author Bierzyns - Initial contribution
  */
-public class LearnerImpl implements Learner{
-	/**
-	 * todo
-	 */
-
-	private Root knowledgeBase;
-	private Logger logger = LogManager.getLogger(LearnerImpl.class);
-	private String saveFile="src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/";
-	private String loadFile;
-
-
-	@Override
-	public void setKnowledgeBase(Root knowledgeBase) {
-		 this.knowledgeBase = knowledgeBase;
-	}
-
-	/**
-	 * todo
-	 * @param dataSetName - name of data set that is loaded from the data set folder e.g. data1.arff (Weka)
-	 * @param modelID - ID of the model that will be trained with this data set
-	 * @return
-	 */
-
-	@Override
-	public boolean loadDataSet(String dataSetName, int modelID) {
-		loadFile = saveFile + dataSetName;
-		return false;
-	}
-
-	/**
-	 * todo
-	 * @param modelID - ID of model that will be trained
-	 * @return
-	 */
-
-	@Override
-	public boolean train(int modelID) {
-		// Method for the initial training of algorithms and models. That uses external data set for training.
-
-		int modelId= modelID;
-		String File = loadFile;
-		String savefile = saveFile + modelID+".eg";
-		File file = new File(File);
-		VersatileDataSource source = new CSVDataSource(file, false, CSVFormat.DECIMAL_POINT);
-		VersatileMLDataSet data = new VersatileMLDataSet(source);
-		data.defineSourceColumn("monat", 0, ColumnType.continuous);
-		data.defineSourceColumn("day", 1, ColumnType.continuous);
-		data.defineSourceColumn("hour", 2, ColumnType.continuous);
-		data.defineSourceColumn("minute", 3, ColumnType.continuous);
-		ColumnDefinition outputColumn = data.defineSourceColumn("labels", 4, ColumnType.nominal);
-		data.analyze();
-		data.defineSingleOutputOthersInput(outputColumn);
-		EncogModel model = new EncogModel(data);
-		model.selectMethod(data, MLMethodFactory.TYPE_FEEDFORWARD);
-		data.normalize();
-		model.holdBackValidation(0.3, true, 1001);
-		model.selectTrainingType(data);
-		MLRegression bestMethod = (MLRegression)model.crossvalidate(5, true);
-		NormalizationHelper helper = data.getNormHelper();
-		// save network...
-		//to delete
-		saveObject(new File(savefile), bestMethod);
-		Encog.getInstance().shutdown();
-
-		return true;
-	}
-
-	/**
-	 * todo
-	 * @param set - data set on which the training is based on
-	 * @param modelID - ID of model that will be trained
-	 * @return
-	 */
-
-	@Override
-	public boolean train(ArrayList<Integer> set, int modelID) {
-		// 1. convert the set into csv data list
-		try{
-			convertFromArraystoCSV(set, modelID, false);}
-		catch(IOException e) {
-			e.printStackTrace();
-		}
-		// 2. load the csv data and data normalize
-		String datasetName = String.valueOf(modelID)+".csv";
-		loadDataSet(datasetName,modelID);
-		// 3. data train and save the Encog model
-		train(modelID);
-		logger.info("model"+modelID+"has been trained");
-		return true;
-	}
-
-	/**
-	 * todo
-	 * @param set - data set on which the retraining is based on
-	 * @param modelID - ID of the model that will be retrained
-	 * @return
-	 */
-
-	@Override
-	public boolean reTrain(ArrayList<Integer> set, int modelID) {
-		// 1. add new data set to the existing data set
-		logger.info("model"+modelID+" will be retrained");
-		try{
-			convertFromArraystoCSV(set, modelID, true);}
-		catch(IOException e) {
-			e.printStackTrace();
-		}
-		// 2. data normalize again
-		String datasetName = String.valueOf(modelID)+".csv";
-		loadDataSet(datasetName,modelID);
-		// 3. get new Encog model and save it with the modelID
-		train(modelID);
-		logger.info("model"+modelID+" has been retrained");
-		return true;
-	}
-
-	/**
-	 * todo
-	 * @param modelID - ID of the model of which information are requested from
-	 * @return
-	 */
-	@Override
-	public Model getTrainedModel(int modelID) {
-		String file = saveFile + String.valueOf(modelID) + ".eg";
-		try (BufferedReader br = new BufferedReader(new FileReader(file))) {
-			return getTrainedModel(br, modelID);
-		} catch (IOException e) {
-			logger.catching(e);
-			return null;
-		}
-	}
-
-	@Override
-	public Model getTrainedModel(URL url, int modelID) {
-		try (BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream()))) {
-			return getTrainedModel(br, modelID);
-		} catch (IOException e) {
-			logger.catching(e);
-			return null;
-		}
-	}
-
-	private Model getTrainedModel(BufferedReader reader, int modelID) {
-		//TODO
-		// read from Encog model file
-		//model infos: 1. neutral network type, how many layers, how many neutrons in each layer, weights,
-		// activation functions, normalize infos max min ...
-		Model model=new Model("NN");
-		Integer k = 0;
-		ArrayList<Double> weights=new ArrayList<Double>();
-		String st;
-		String[] split_st;
-		String st_part;
-		String[] split_st_part;
-			try{
-				while ((st = reader.readLine()) != null){
-
-					if(st.contains("weights")){
-						split_st = st.split("=");
-						st_part=split_st[1];
-						split_st_part=st_part.split(",");
-						for (int i = 0; i < (split_st_part.length); i++){
-							weights.add(Double.valueOf(split_st_part[i]));
-							model.setWeights(weights);
-						}
-					}
-					//layerCounts=4,8,5
-					if(st.contains("layerFeedCounts")){
-						split_st = st.split("=");
-						st_part=split_st[1];
-						split_st_part=st_part.split(",");
-						if(split_st_part.length==3) {
-							int inputLayerNumber=Integer.valueOf(split_st_part[2]);
-							int hiddenLayerNumber=Integer.valueOf(split_st_part[1]);
-							int outputLayerNumber=Integer.valueOf(split_st_part[0]);
-							model.setInputLayerNumber(inputLayerNumber);
-							model.setHiddenLayerNumber(hiddenLayerNumber);
-							model.setOutputLayerNumber(outputLayerNumber);
-						}else{logger.info("oh my god Neutral Network is more than 3 layers");}
-					}
-					if(st.contains("org")){
-						split_st = st.split("\\.");
-						st_part=split_st[split_st.length-1];
-						split_st_part=st_part.split("\"");
-						if(k == 0){
-							model.setOutputActivationFunction(split_st_part[0]);
-						} else if (k == 1){
-							model.setHiddenActivationFunction(split_st_part[0]);
-						} else if(k == 2){
-							model.setInputActivationFunction(split_st_part[0]);
-						}
-						k += 1;
-					}
-					if(st.contains("biasActivation")){
-						split_st = st.split("=");
-						st_part=split_st[1];
-						split_st_part=st_part.split(",");
-						if(split_st_part.length==3) {
-							int inputBiasNumber=Integer.valueOf(split_st_part[2]);
-							int hiddenBiasNumber=Integer.valueOf(split_st_part[1]);
-							model.setInputBias(inputBiasNumber);
-							model.setHiddenBias(hiddenBiasNumber);
-						}else{logger.info("oh my god Neutral Network is more than 3 layers");}
-
-					}
-				}
-			}
-			catch(IOException e) {
-				e.printStackTrace();
-			}
-
-		//NormalizationHelper helper= getNormalizer(modelID);
-		//model.setNormalizer(helper);
-		return model;
-	}
-
-	/**
-	 *
-	 * @param modelID
-	 * @return
-	 */
-	public ArrayList<Double> getNormalizer(int modelID, ArrayList<Integer> list){
-		String File;
-		int model_id= modelID;
-		if(model_id == 1){
-			File=saveFile + "initial_data.csv";
-		}else {
-			File=saveFile + model_id+".csv";
-		}
-		File file = new File(File);
-		VersatileDataSource source = new CSVDataSource(file, false, CSVFormat.DECIMAL_POINT);
-		VersatileMLDataSet data = new VersatileMLDataSet(source);
-		data.defineSourceColumn("monat", 0, ColumnType.continuous);
-		data.defineSourceColumn("day", 1, ColumnType.continuous);
-		data.defineSourceColumn("hour", 2, ColumnType.continuous);
-		data.defineSourceColumn("minute", 3, ColumnType.continuous);
-		ColumnDefinition outputColumn = data.defineSourceColumn("labels", 4, ColumnType.nominal);
-		data.analyze();
-		data.defineSingleOutputOthersInput(outputColumn);
-		EncogModel model = new EncogModel(data);
-		model.selectMethod(data, MLMethodFactory.TYPE_FEEDFORWARD);
-		data.normalize();
-		NormalizationHelper helper = data.getNormHelper();
-
-		ArrayList<Integer> list3 = list;
-		String[] line = new String[4];
-		MLData input = helper.allocateInputVector();
-		line[0] = String.valueOf(list3.get(0));
-		line[1] = String.valueOf(list3.get(1));
-		line[2] = String.valueOf(list3.get(2));
-		line[3] = String.valueOf(list3.get(3));
-		helper.normalizeInputVector(line,input.getData(),false);
-
-		String helperstr=helper.toString();
-		String [] split=helperstr.split(";");
-		String [] finalStr = split[split.length-1].replace("]","").replace("[","").
-				split(",");
-		System.out.println("helperstr:"+ finalStr[0]);
-
-		String inputStr=input.toString();
-		String str=inputStr.replace("[BasicMLData:","").replace("]","");
-		String [] split1 = str.split(",");
-		ArrayList<Double> normalizedInputandOuput = new ArrayList<Double> (Arrays.asList(Double.valueOf(split1[0]),
-				Double.valueOf(split1[1]), Double.valueOf(split1[2]),Double.valueOf(split1[3]), Double.valueOf(finalStr[0]),
-				Double.valueOf(finalStr[1]),Double.valueOf(finalStr[2]), Double.valueOf(finalStr[3])));
-		return normalizedInputandOuput;
-	}
-	/**
-	 * @param list history values/new values from the system
-	 * @param modelID ML Model ID
-	 * @param initial if false, writer will append from the last word of the file
-	 * @throws IOException
-	 */
-	public void convertFromArraystoCSV(ArrayList<Integer> list, int modelID, boolean initial)throws IOException{
-		//create a csv data for store the arrays
-		String File = saveFile+ String.valueOf(modelID) + ".csv";
-		FileWriter writer= new FileWriter(File, initial);
-        for (int i = 0; i < (list.size()/5); i++) {
-			writer.append(String.valueOf(list.get(0+i*5)));
-			writer.append(',');
-			writer.append(String.valueOf(list.get(1+i*5)));
-			writer.append(',');
-			writer.append(String.valueOf(list.get(2+i*5)));
-			writer.append(',');
-			writer.append(String.valueOf(list.get(3+i*5)));
-			writer.append(',');
-			writer.append(String.valueOf(list.get(4+i*5)));
-			writer.append('\n');
+public class LearnerImpl implements Learner {
+
+
+  private Root knowledgeBase;
+  private static final Logger logger = LogManager.getLogger(LearnerImpl.class);
+  private Path csvFolderPath = Paths.get("src", "main", "resources");
+  private String modelFolderPath = ".";
+  private CSVFormat format = new CSVFormat('.', ',');
+  private Map<Integer, Dataset> datasets = new HashMap<>();
+  private Map<Integer, Network> models = new HashMap<>();
+
+
+  @Override
+  public void setKnowledgeBase(Root knowledgeBase) {
+    this.knowledgeBase = knowledgeBase;
+  }
+
+  public void setCsvFolderPath(String csvFolderPath) {
+    this.csvFolderPath = Paths.get(csvFolderPath);
+  }
+
+  public void setModelFolderPath(String modelFolderPath) {
+    this.modelFolderPath = modelFolderPath;
+  }
+
+  @Override
+  public boolean loadDataSet(String dataSetName, List<Integer> targetColumns, int modelID) {
+    Path realDataSetPath = csvFolderPath.resolve(dataSetName);
+    logger.debug("Load data set from file {}", realDataSetPath);
+    try {
+      Dataset set = new Dataset(new ReadCSV(realDataSetPath.toFile().getAbsoluteFile(), false, format), targetColumns);
+      datasets.put(modelID, set);
+    } catch (Exception e) {
+      e.printStackTrace();
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public boolean loadModelFromFile(File file, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                                   List<Integer> targetMins) {
+    logger.debug("Load model from file {}", file);
+    models.put(modelID, new Network(file.getAbsolutePath(), modelID, inputMaxes, inputMins, targetMaxes, targetMins));
+    return true;
+  }
+
+  @Override
+  public boolean loadModelFromFile(InputStream input, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                                   List<Integer> targetMins) {
+    logger.debug("Load model from input stream");
+    models.put(modelID, new Network(input, modelID, inputMaxes, inputMins, targetMaxes, targetMins));
+    return true;
+  }
+
+  @Override
+  public boolean train(int inputCount, int outputCount, int hiddenCount, int hiddenNeuronCount, int modelID,
+                       List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                       List<Integer> targetMins) {
+    // Initial training of algorithms and models, using an external data set for training.
+
+    if (datasets.get(modelID) != null) {
+      Dataset set = datasets.get(modelID);
+
+      ReadCSV csv = set.getCsv();
+
+      Network model = new Network(inputCount, outputCount, hiddenCount, hiddenNeuronCount, modelID, inputMaxes,
+          inputMins, targetMaxes, targetMins);
+
+      ArrayList<Double> input = new ArrayList<>();
+      ArrayList<Double> target = new ArrayList<>();
+
+      while (csv.next()) {
+        logger.debug("Train next csv row");
+        for (int i = 0; i < csv.getColumnCount(); i++) {
+          int col_nr = i + 1;
+          if (set.getTargetColumns().contains(col_nr)) {
+            target.add(csv.getDouble(i));
+          } else {
+            input.add(csv.getDouble(i));
+          }
         }
-        writer.close();
-	}
+
+        model.train(input, target);
+        input.clear();
+        target.clear();
+      }
+
+      models.put(modelID, model);
+      model.saveModel(modelFolderPath);
+
+      return true;
+    }
+    return false;
+  }
+
+  @Override
+  public boolean train(double[][] data, int inputCount, int outputCount, int hiddenCount, int hiddenNeuronCount, int modelID,
+                       List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                       List<Integer> targetMins, List<Integer> targetColumns) {
+
+    Network model = new Network(inputCount, outputCount, hiddenCount, hiddenNeuronCount, modelID, inputMaxes,
+        inputMins, targetMaxes, targetMins);
+
+    return reTrainModel(model, data, targetColumns, modelID);
+  }
+
+
+  @Override
+  public boolean reTrain(double[][] data, List<Integer> targetColumns, int modelID) {
+
+    Network model = models.get(modelID);
+
+    return reTrainModel(model, data, targetColumns, modelID);
+  }
+
+  private boolean reTrainModel(Network model, double[][] data, List<Integer> targetColumns, int modelID) {
+    List<Double> input = new ArrayList<>();
+    List<Double> target = new ArrayList<>();
+
+    for (int i = 0; i < data.length; i++) {
+
+      for (int j = 0; j < data[0].length; j++) {
+        int col_nr = j + 1;
+        if (targetColumns.contains(col_nr)) {
+          target.add(data[i][j]);
+        } else {
+          input.add(data[i][j]);
+        }
+      }
+
+      // train once per row, after the complete row has been split into input and target values
+      model.train(input, target);
+      input.clear();
+      target.clear();
+    }
+
+    models.put(modelID, model);
+    model.saveModel(modelFolderPath);
+
+    return true;
+  }
+
+
+  @Override
+  public EncogModel getTrainedModel(int modelID) {
+    return fillModel(modelID);
+  }
+
+  @Override
+  public EncogModel getTrainedModel(URL url, int modelID) {
+    return fillModel(modelID);
+  }
+
+  private EncogModel fillModel(int modelID) {
+    EncogModel encogModel = new EncogModel("NN");
+    BasicNetwork nn = models.get(modelID).getNetwork();
+
+    ArrayList<Double> weightsList = new ArrayList<>();
+    String weights = nn.dumpWeights();
+    String[] split = weights.split(",");
+
+    for (int i = 0; i < (split.length); i++) {
+      weightsList.add(Double.valueOf(split[i]));
+    }
+
+    encogModel.setWeights(weightsList);
+
+    // do not use getLayers() because it is not restored immediately on load from file
+    FlatNetwork flat = nn.getFlat();
+    List<Layer> layers = new ArrayList<>(flat.getLayerCounts().length);
+    logger.debug("layer counts: {}", Arrays.toString(flat.getLayerCounts()));
+    for (int j = 0; j < flat.getLayerCounts().length; j++) {
+//      boolean hasBias = j != 0 && j != flat.getLayerCounts().length - 1;
+      boolean hasBias = flat.getLayerCounts()[j] != flat.getLayerFeedCounts()[j];
+      Layer l = new BasicLayer(flat.getActivationFunctions()[j], hasBias, flat.getLayerCounts()[j]);
+      l.setBiasActivation(flat.getBiasActivation()[j]);
+      layers.add(0, l);
+    }
+
+    encogModel.setLayers(layers);
+
+    return encogModel;
+  }
+
+
+  /**
+   * @param modelID - ID of the model whose input normalizer is requested
+   * @param columnNr - number of the input column the normalizer is used for
+   * @return the {@link NormalizedField} for the given input column
+   */
+  public NormalizedField getNormalizerInput(int modelID, int columnNr) {
+    return models.get(modelID).getNormalizersIn().get(columnNr);
+  }
+
+  /**
+   * @param modelID - ID of the model whose target normalizer is requested
+   * @param columnNr - number of the target column the normalizer is used for
+   * @return the {@link NormalizedField} for the given target column
+   */
+  public NormalizedField getNormalizerTar(int modelID, int columnNr) {
+    return models.get(modelID).getNormalizersTar().get(columnNr);
+  }
+
 }
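A hedged sketch of driving the reworked implementation end to end from a CSV file; the folder names, the file training.csv, the column layout and the min/max ranges are assumptions chosen for illustration:

    import de.tudresden.inf.st.eraser.feedbackloop.learner.LearnerImpl;

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    class TrainFromCsvSketch {
      public static void main(String[] args) {
        LearnerImpl learner = new LearnerImpl();
        // defaults are src/main/resources and "."; override them explicitly here
        learner.setCsvFolderPath("data");      // hypothetical folder containing training.csv
        learner.setModelFolderPath("models");  // hypothetical folder for the persisted model
        List<Integer> targetColumns = Collections.singletonList(5); // hypothetical label column
        learner.loadDataSet("training.csv", targetColumns, 1);
        // 4 inputs, 1 output, 1 hidden layer with 7 neurons; ranges are illustrative
        boolean ok = learner.train(4, 1, 1, 7, 1,
            Arrays.asList(12, 31, 23, 59), Arrays.asList(1, 1, 0, 0),
            Collections.singletonList(3), Collections.singletonList(0));
        System.out.println("training started: " + ok);
      }
    }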
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java
index e94f6706ac57ecef13c75a384f20325866cf6884..0f82201e8bcffb278656fb821d6efb3e55008791 100644
--- a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Main.java
@@ -1,75 +1,95 @@
 package de.tudresden.inf.st.eraser.feedbackloop.learner;
 
 import de.tudresden.inf.st.eraser.feedbackloop.api.Learner;
-import de.tudresden.inf.st.eraser.feedbackloop.api.model.*;
+import de.tudresden.inf.st.eraser.feedbackloop.api.EncogModel;
 import de.tudresden.inf.st.eraser.jastadd.model.*;
 import org.apache.commons.math3.stat.StatUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.encog.ml.data.MLData;
-import org.encog.ml.data.versatile.NormalizationHelper;
 
+import java.io.File;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
-import java.util.stream.Collectors;
 
+@SuppressWarnings("unused")
 public class Main {
 	private static final Logger logger = LogManager.getLogger(Main.class);
-	private static ArrayList<Integer> list1;
-	private static ArrayList<Integer> list2;
+	private static class InitialDataConfig {
+		static List<Integer> inputMins = Arrays.asList(
+				7,  // min month
+				1,  // min day
+				10, // min hour
+				1   // min minute
+		);
+		static List<Integer> inputMaxes = Arrays.asList(
+				9,  // max month
+				31, // max day
+				21, // max hour
+				60  // max minute
+		);
+		static List<Integer> targetMins = Collections.singletonList(0);
+		static List<Integer> targetMaxes = Collections.singletonList(3);
+		static String csv_filename = "initial_data.csv";
+		static String encog_filename = Paths.get("src", "main", "resources").toFile().getAbsolutePath() + "/";
+		static int inputCount = 4;
+		static int outputCount = 1;
+		static int hiddenCount = 0;
+		static int hiddenNeuronCount = 7;
+		static List<Integer> targetColumns = Collections.singletonList(4);
+	}
 
 	public static void main(String[] args) {
+//		loadFromCsv();
+		loadFromEncog();
+	}
 
+	private static void loadFromCsv() {
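+		// Train a network from initial_data.csv (target column 4, bounds taken from InitialDataConfig) and print the resulting model.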
 		Learner learner = new LearnerImpl();
-		//learner.loadDataSet("initial_data.csv",1);
-		//learner.train(1);
-
-		Model model = learner.getTrainedModel(1);
-
-		ArrayList<Double> weights = model.getWeights();
-		int inputLayerNumber = model.getInputLayerNumber();
-		int inputBiasNumber = model.getInputBias();
-		int hiddenLayerNumber = model.gethiddenLayerNumber();
-		int hiddenBiasNumber = model.getHiddenBias();
-		int outputLayerNumber = model.getOutputLayerNumber();
-		String inputActivation = model.getInputActivationFunction();
-		String hiddenActivation = model.getHiddenActivationFunction();
-		String outputActivation = model.getOutputActivationFunction();
-
-		logger.info("Model Typ is: " + model.getModelType());
-		logger.info("Model Weights are: " + model.getWeights());
-		logger.info("Model input normal neutrons: " + model.getInputLayerNumber());
-		logger.info("Model input bias neutron: " + model.getInputBias());
-		logger.info("Model hidden normal neutrons: " + model.gethiddenLayerNumber());
-		logger.info("Model hidden bias neutron: " + model.getHiddenBias());
-		logger.info("Model output neutrons: " + model.getOutputLayerNumber());
-		logger.info("Model input activation function: " + model.getInputActivationFunction());
-		logger.info("Model hidden activation function: " + model.getHiddenActivationFunction());
-		logger.info("Model output activation function: " + model.getOutputActivationFunction());
-		//logger.info("Normalizer helper: " + helper);
-
-
-		//TODO create NN eraser Model
-		//input from system ??? how to get them?
-		ArrayList<Integer> list3 = new ArrayList<Integer>(Arrays.asList(7, 20, 12, 13, 2));
-
-		//ArrayList<Integer> list3 = new ArrayList<Integer>(Arrays.asList(7,24,14,38,2));
-		//normalize the input data
-		ArrayList<Double> normalizedInputandOutput = learner.getNormalizer(1, list3);
-		createBrightnessNetwork(weights, inputBiasNumber, hiddenLayerNumber, hiddenBiasNumber, normalizedInputandOutput);
+		learner.loadDataSet(InitialDataConfig.csv_filename, InitialDataConfig.targetColumns,1);
+		learner.train(
+				InitialDataConfig.inputCount, InitialDataConfig.outputCount, InitialDataConfig.hiddenCount,
+				InitialDataConfig.hiddenNeuronCount, 1, InitialDataConfig.inputMaxes, InitialDataConfig.inputMins,
+				InitialDataConfig.targetMaxes, InitialDataConfig.targetMins);
+
+		printModel(learner.getTrainedModel(1));
+	}
+
+	private static void loadFromEncog() {
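+		// Load the persisted Encog network for model ID 1 from the resources folder, print it, and transform it into an eraser NeuralNetworkRoot.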
+		Learner learner = new LearnerImpl();
+		learner.loadModelFromFile(
+				new File(InitialDataConfig.encog_filename), 1,
+				InitialDataConfig.inputMaxes, InitialDataConfig.inputMins,
+				InitialDataConfig.targetMaxes, InitialDataConfig.targetMins);
+		printModel(learner.getTrainedModel(1));
+		NeuralNetworkRoot eraserModel = LearnerHelper.transform(learner.getTrainedModel(1));
+	}
+
+	private static void printModel(EncogModel encogModel) {
+		logger.info("Model Type is: " + encogModel.getModelType());
+		logger.info("Model Weights are: " + encogModel.getWeights());
+		logger.info("Model layers are: " + encogModel.getLayers());
+//		logger.info("Model input normal neutrons: " + model.getInputLayerNumber());
+//		logger.info("Model input bias neutron: " + model.getInputBias());
+//		logger.info("Model hidden normal neutrons: " + model.gethiddenLayerNumber());
+//		logger.info("Model hidden bias neutron: " + model.getHiddenBias());
+//		logger.info("Model output neutrons: " + model.getOutputLayerNumber());
+//		logger.info("Model input activation function: " + model.getInputActivationFunction());
+//		logger.info("Model hidden activation function: " + model.getHiddenActivationFunction());
+//		logger.info("Model output activation function: " + model.getOutputActivationFunction());
 	}
 
 	//(ArrayList<Integer> list3
 	private static Root createModel(ArrayList<Double> normalizedInputs, int inputBiasNumber) {
 
 		//create KB Model
-		Root model = Root.createEmptyRoot();
+		Root root = Root.createEmptyRoot();
 		Group group = new Group();
 		group.setID("Group1");
-		model.addGroup(group);
+		root.getOpenHAB2Model().addGroup(group);
 
 		NumberItem monthItem = new NumberItem();
 		monthItem.setState(normalizedInputs.get(0));
@@ -102,7 +122,7 @@ public class Main {
 		group.addItem(dayItem);
 		group.addItem(hourItem);
 		group.addItem(minuteItem);
-		return model;
+		return root;
 	}
 
 	/**
@@ -110,7 +130,8 @@ public class Main {
 	 */
 	private static void createBrightnessNetwork(ArrayList<Double> all_weights, int inputBiasNumber, int hiddenlayernumber,
 												int hiddenlayerbias, ArrayList<Double> normalizedInputsandOutput) {
-		Root model = createModel(normalizedInputsandOutput, inputBiasNumber);
+		Root root = createModel(normalizedInputsandOutput, inputBiasNumber);
+		OpenHAB2Model model = root.getOpenHAB2Model();
 		Item monthItem = model.resolveItem("month").orElseThrow(
 				() -> new RuntimeException("Month not found"));
 		Item dayItem = model.resolveItem("day").orElseThrow(
@@ -200,8 +221,8 @@ public class Main {
 				hiddenNeuron.connectTo(output3, weights.get(i + hiddenSum * 3));
 			}
 		}
-		model.getMachineLearningRoot().setPreferenceLearning(nn);
-		System.out.println(model.prettyPrint());
+		root.getMachineLearningRoot().setPreferenceLearning(nn);
+		System.out.println(root.prettyPrint());
 
 		List<String> output = new ArrayList<>();
 		Function<DoubleNumber, String> leafToString = classification -> Double.toString(classification.number);
@@ -235,4 +256,4 @@ public class Main {
 	//outputs:
 	//[BasicMLData:0.36743405976714366,-0.6610085416169492,-0.9999999998849812,-0.9999999224507112]
 
-	}
\ No newline at end of file
+	}
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java
new file mode 100644
index 0000000000000000000000000000000000000000..e3467c7fda1544befee165d0316f11e9b35e8d27
--- /dev/null
+++ b/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/Network.java
@@ -0,0 +1,202 @@
+package de.tudresden.inf.st.eraser.feedbackloop.learner;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.encog.engine.network.activation.ActivationSigmoid;
+import org.encog.ml.data.MLDataSet;
+import org.encog.ml.data.basic.BasicMLDataSet;
+import org.encog.ml.train.MLTrain;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.encog.neural.networks.training.propagation.back.Backpropagation;
+import org.encog.persist.EncogDirectoryPersistence;
+import org.encog.util.arrayutil.NormalizationAction;
+import org.encog.util.arrayutil.NormalizedField;
+import org.encog.util.simple.EncogUtility;
+
+/**
+ * Network class serves as an interface to the Encog BasicNetwork and holds functions for handling the BasicNetwork (training, input, output and inference).
+ *
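+ * <p>Illustrative usage sketch; all concrete values below are assumptions for demonstration only:</p>
+ * <pre>{@code
+ * Network net = new Network(4, 1, 1, 7, 1,
+ *     Arrays.asList(9, 31, 21, 60), Arrays.asList(7, 1, 10, 1),
+ *     Collections.singletonList(3), Collections.singletonList(0));
+ * net.train(Arrays.asList(8.0, 15.0, 14.0, 30.0), Collections.singletonList(2.0));
+ * double[] prediction = net.computeResult(Arrays.asList(8.0, 15.0, 14.0, 30.0));
+ * net.saveModel("models/");
+ * }</pre>
+ *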
+ * @author Bierzynski - initial contribution
+ */
+public class Network {
+  private static final Logger logger = LogManager.getLogger(Network.class);
+  private BasicNetwork network;
+  private int modelID;
+  private ArrayList<NormalizedField> normalizersIn;
+  private ArrayList<NormalizedField> normalizersTar;
+
+  /**
+   * Constructor for when the neural network is created from data.
+   *
+   * @param inputCount        number of neurons in the input layer
+   * @param outputCount       number of neurons in the output layer
+   * @param hiddenCount       number of hidden layers in the network
+   * @param hiddenNeuronCount number of neurons in each hidden layer (for now, the same count is used for all hidden layers)
+   * @param modelID           ID of the BasicNetwork.
+   * @param inputMaxes        list that contains max values of all input columns (sensors) e.g. light intensity 100
+   * @param inputMins         list that contains min values of all input columns (sensors) e.g. light intensity 0
+   * @param targetMaxes       list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+   * @param targetMins        list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+   */
+  public Network(int inputCount, int outputCount, int hiddenCount, int hiddenNeuronCount, int modelID,
+                 List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                 List<Integer> targetMins) {
+
+    normalizersIn = new ArrayList<>();
+    normalizersTar = new ArrayList<>();
+    this.modelID = modelID;
+
+    network = new BasicNetwork();
+
+    network.addLayer(new BasicLayer(null, true, inputCount));
+
+    for (int i = 0; i < hiddenCount; i++) {
+      network.addLayer(new BasicLayer(new ActivationSigmoid(), true, hiddenNeuronCount));
+    }
+
+    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, outputCount));
+    network.getStructure().finalizeStructure();
+    network.reset();
+
+    addNormalizer(inputMaxes, inputMins, normalizersIn);
+    addNormalizer(targetMaxes, targetMins, normalizersTar);
+  }
+
+  private void addNormalizer(List<Integer> maxes, List<Integer> mins, ArrayList<NormalizedField> normalizers) {
+    for (int j = 0; j < maxes.size(); j++) {
+      NormalizedField normalizer = new NormalizedField("in_" + j, NormalizationAction.Normalize,
+          maxes.get(j), mins.get(j));
+      normalizers.add(normalizer);
+    }
+  }
+
+  /**
+   * Constructor for when the neural network is loaded from a file.
+   * Please note that the normalizers are not loaded from the file, because it is assumed that the mins and maxes are stored in the metadata of the data sets or items anyway.
+   *
+   * @param path        path to the save folder of the model files e.g. C:\models\
+   * @param modelID     ID of the BasicNetwork.
+   * @param inputMaxes  list that contains max values of all input columns (sensors) e.g. light intensity 100
+   * @param inputMins   list that contains min values of all input columns (sensors) e.g. light intensity 0
+   * @param targetMaxes list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+   * @param targetMins  list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+   */
+  public Network(String path, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                 List<Integer> targetMins) {
+    this(() -> (BasicNetwork) EncogDirectoryPersistence.loadObject(new File(path, "NN_" + modelID)), modelID, inputMaxes, inputMins, targetMaxes, targetMins);
+  }
+
+  /**
+   * Constructor for when the neural network is loaded from an input stream.
+   * Please note that the normalizers are not loaded from the stream, because it is assumed that the mins and maxes are stored in the metadata of the data sets or items anyway.
+   *
+   * @param input       stream to load the model from
+   * @param modelID     ID of the BasicNetwork.
+   * @param inputMaxes  list that contains max values of all input columns (sensors) e.g. light intensity 100
+   * @param inputMins   list that contains min values of all input columns (sensors) e.g. light intensity 0
+   * @param targetMaxes list that contains max values of all output columns (results) e.g. brightness 100 for preference learning
+   * @param targetMins  list that contains min values of all output columns (results) e.g. brightness 0 for preference learning
+   */
+  public Network(InputStream input, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                 List<Integer> targetMins) {
+    this(() -> (BasicNetwork) EncogDirectoryPersistence.loadObject(input), modelID, inputMaxes, inputMins, targetMaxes, targetMins);
+  }
+
+  private Network(LoadEncogModel loader, int modelID, List<Integer> inputMaxes, List<Integer> inputMins, List<Integer> targetMaxes,
+                 List<Integer> targetMins) {
+    this.modelID = modelID;
+
+    normalizersIn = new ArrayList<>();
+    normalizersTar = new ArrayList<>();
+
+    network = loader.load();
+
+    addNormalizer(inputMaxes, inputMins, normalizersIn);
+    addNormalizer(targetMaxes, targetMins, normalizersTar);
+  }
+
+  @FunctionalInterface
+  interface LoadEncogModel {
+    BasicNetwork load();
+  }
+
+  /**
+   * Method to save the trained {@link BasicNetwork} to a file.
+   * The file name is always NN_ followed by the model ID.
+   *
+   * @param savePath path to the folder in which the model file should be placed; must end with a file separator, as it is concatenated directly with the file name
+   */
+  public void saveModel(String savePath) {
+    EncogDirectoryPersistence.saveObject(new File(savePath + "NN_" + modelID), network);
+  }
+
+  /**
+   * Method for training the {@link BasicNetwork}.
+   * For now, training on a single data row is implemented here, to optimize for preference learning.
+   *
+   * @param input  input part of the data row which should be used for training
+   * @param target target/output part of the data row which should be used for training
+   */
+  public void train(List<Double> input, List<Double> target) {
+    double[][] INPUT = new double[1][input.size()];
+    double[][] IDEAL = new double[1][target.size()];
+
+    for (int i = 0; i < input.size(); i++) {
+      INPUT[0][i] = normalizersIn.get(i).normalize(input.get(i));
+    }
+
+    for (int j = 0; j < target.size(); j++) {
+      IDEAL[0][j] = normalizersTar.get(j).normalize(target.get(j));
+    }
+
+    MLDataSet trainingSet = new BasicMLDataSet(INPUT, IDEAL);
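+    // Encog backpropagation with learning rate 3.5 and momentum 0.3; iterate until the training error drops below 0.5%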
+    MLTrain train = new Backpropagation(network, trainingSet, 3.5, 0.3);
+
+    do {
+      train.iteration();
+    } while (train.getError() > 0.005);
+
+    train.finishTraining();
+  }
+
+  /**
+   * Method that uses the {@link BasicNetwork} to compute a prediction/classification for the given input.
+   *
+   * @param inputVector data that should be processed
+   * @return the de-normalized output values of the network
+   */
+  public double[] computeResult(List<Double> inputVector) {
+    double[] output = new double[normalizersTar.size()];
+    double[] input = new double[inputVector.size()];
+
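+    // normalize the inputs with the same normalizers that were used for training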
+    for (int i = 0; i < inputVector.size(); i++) {
+      input[i] = normalizersIn.get(i).normalize(inputVector.get(i));
+    }
+
+    network.compute(input, output);
+
+    for (int j = 0; j < normalizersTar.size(); j++) {
+      output[j] = normalizersTar.get(j).deNormalize(output[j]);
+    }
+
+    return output;
+  }
+
+  public BasicNetwork getNetwork() {
+    return network;
+  }
+
+  public ArrayList<NormalizedField> getNormalizersIn() {
+    return normalizersIn;
+  }
+
+  public ArrayList<NormalizedField> getNormalizersTar() {
+    return normalizersTar;
+  }
+}
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/1.eg b/feedbackloop.learner/src/main/resources/1.eg
similarity index 100%
rename from feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/1.eg
rename to feedbackloop.learner/src/main/resources/1.eg
diff --git a/feedbackloop.learner/src/main/resources/NN_1 b/feedbackloop.learner/src/main/resources/NN_1
new file mode 120000
index 0000000000000000000000000000000000000000..c47a1c155ec39abc4357baa43dbafad60eb2988a
--- /dev/null
+++ b/feedbackloop.learner/src/main/resources/NN_1
@@ -0,0 +1 @@
+1.eg
\ No newline at end of file
diff --git a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/initial_data.csv b/feedbackloop.learner/src/main/resources/initial_data.csv
similarity index 92%
rename from feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/initial_data.csv
rename to feedbackloop.learner/src/main/resources/initial_data.csv
index a1e263194d3ee4f95c64c3eff3b5123fb2246cf5..12c125ce2bbdc9ec6bb9762f05f6a83c3b863bb0 100644
--- a/feedbackloop.learner/src/main/java/de/tudresden/inf/st/eraser/feedbackloop/learner/initial_data.csv
+++ b/feedbackloop.learner/src/main/resources/initial_data.csv
@@ -1,418 +1,418 @@
-7,20,12,13,2
-7,20,14,40,1
-7,20,14,40,2
-7,21,13,2,2
-7,21,13,2,2
-7,21,14,23,2
-7,21,14,23,2
-7,21,15,41,2
-7,21,16,54,2
-7,21,16,54,2
-7,21,17,45,3
-7,22,12,28,3
-7,22,15,35,2
-7,22,15,35,2
-7,22,18,59,3
-7,22,18,59,3
-7,23,12,32,2
-7,23,12,32,2
-7,23,16,7,2
-7,23,16,7,2
-7,23,16,7,2
-7,23,16,7,2
-7,23,16,7,2
-7,24,12,4,0
-7,24,12,4,0
-7,24,12,4,1
-7,24,14,38,2
-7,24,14,38,2
-7,24,18,54,3
-7,25,12,31,0
-7,25,12,32,1
-7,25,12,32,1
-7,25,15,6,3
-7,25,18,56,3
-7,26,13,41,2
-7,26,19,14,3
-7,27,11,39,2
-7,27,11,39,3
-7,27,11,46,3
-7,27,11,46,2
-7,27,13,8,2
-7,27,13,8,2
-7,27,13,9,2
-7,27,13,45,2
-7,27,13,45,2
-7,27,15,38,3
-7,28,12,12,2
-7,28,12,13,2
-7,28,12,41,2
-7,28,12,41,2
-7,28,12,41,2
-7,28,14,0,1
-7,28,14,0,2
-7,28,15,21,3
-7,28,18,56,3
-7,29,10,9,1
-7,29,10,9,1
-7,29,10,9,1
-7,29,11,54,0
-7,29,11,54,0
-7,29,11,54,0
-7,29,11,54,1
-7,29,14,10,2
-7,29,16,44,2
-7,29,16,44,2
-7,30,16,7,3
-7,30,18,45,3
-7,31,13,2,0
-7,31,13,2,1
-7,31,13,3,1
-7,31,13,3,1
-7,31,13,3,1
-7,31,18,39,3
-8,1,12,22,0
-8,1,12,22,1
-8,1,14,20,2
-8,1,14,20,2
-8,1,14,20,2
-8,1,15,55,3
-8,1,18,31,3
-8,1,18,37,3
-8,1,18,37,3
-8,1,19,2,3
-8,1,19,2,3
-8,1,20,5,3
-8,2,10,9,2
-8,2,10,9,1
-8,2,10,9,2
-8,2,10,9,2
-8,2,13,58,2
-8,2,13,58,2
-8,2,15,44,3
-8,2,15,44,3
-8,2,15,44,3
-8,2,17,21,3
-8,2,17,21,3
-8,2,17,21,3
-8,3,13,31,1
-8,3,13,31,2
-8,3,13,32,2
-8,3,16,43,3
-8,4,13,20,1
-8,4,13,20,2
-8,4,18,27,3
-8,5,13,37,2
-8,5,13,37,2
-8,5,18,33,3
-8,6,11,24,3
-8,6,11,24,3
-8,6,11,24,3
-8,6,13,50,3
-8,7,13,4,2
-8,7,13,4,2
-8,7,14,56,3
-8,8,12,13,2
-8,8,12,13,2
-8,8,15,51,2
-8,8,15,51,2
-8,8,15,51,3
-8,9,13,32,2
-8,9,13,32,2
-8,9,13,32,2
-8,9,15,8,2
-8,9,15,8,2
-8,9,15,8,2
-8,9,16,19,2
-8,10,11,32,0
-8,10,11,32,1
-8,10,11,32,1
-8,10,13,13,1
-8,10,13,13,1
-8,10,13,13,2
-8,10,16,42,3
-8,10,16,42,3
-8,11,14,6,2
-8,11,14,7,2
-8,11,18,54,3
-8,11,18,54,3
-8,11,18,54,3
-8,12,12,27,1
-8,12,12,27,1
-8,12,12,28,1
-8,12,13,53,2
-8,12,13,53,2
-8,12,13,53,2
-8,12,15,21,3
-8,13,13,16,1
-8,13,13,16,1
-8,13,13,16,1
-8,13,14,14,2
-8,13,14,14,2
-8,13,16,11,3
-8,13,17,18,3
-8,14,13,7,1
-8,14,13,7,1
-8,14,13,7,1
-8,14,13,7,1
-8,14,13,7,2
-8,14,13,7,2
-8,14,15,6,3
-8,15,14,5,2
-8,15,14,5,2
-8,15,14,6,2
-8,15,14,6,2
-8,15,16,41,3
-8,15,16,41,3
-8,15,17,30,3
-8,16,13,40,2
-8,16,13,40,2
-8,16,17,52,3
-8,16,17,53,3
-8,17,13,34,1
-8,17,13,35,2
-8,17,14,7,2
-8,17,19,2,3
-8,18,10,21,3
-8,18,11,14,2
-8,18,11,14,2
-8,18,11,14,2
-8,18,11,14,2
-8,18,14,25,2
-8,18,14,25,3
-8,18,14,25,2
-8,18,18,18,3
-8,18,18,19,3
-8,19,18,33,3
-8,19,18,33,3
-8,19,18,33,3
-8,19,18,33,3
-8,20,14,28,2
-8,20,14,28,2
-8,20,14,28,2
-8,20,14,28,2
-8,20,17,8,3
-8,20,18,22,3
-8,21,11,24,1
-8,21,11,24,1
-8,21,11,24,1
-8,21,15,34,3
-8,21,18,55,3
-8,22,12,3,1
-8,22,12,4,2
-8,22,12,4,2
-8,22,13,51,2
-8,22,13,51,2
-8,22,13,51,2
-8,22,18,12,3
-8,22,18,12,3
-8,22,18,12,3
-8,22,18,12,3
-8,22,18,40,3
-8,22,18,40,3
-8,23,13,42,1
-8,23,13,42,1
-8,23,17,32,3
-8,23,19,28,3
-8,23,20,27,3
-8,23,20,27,3
-8,23,21,49,3
-8,24,14,0,2
-8,24,14,0,2
-8,24,14,0,2
-8,24,14,0,2
-8,24,15,4,3
-8,24,15,4,3
-8,24,16,2,3
-8,24,16,3,3
-8,24,16,37,3
-8,24,17,9,3
-8,24,17,14,3
-8,25,13,34,1
-8,25,13,34,1
-8,25,13,34,1
-8,25,13,34,1
-8,25,13,34,1
-8,25,15,1,3
-8,25,17,58,3
-8,26,10,29,0
-8,26,10,29,0
-8,26,10,29,0
-8,26,10,29,0
-8,26,10,29,0
-8,26,16,42,3
-8,26,16,42,3
-8,26,18,41,3
-8,26,18,41,3
-8,27,13,41,2
-8,27,13,41,2
-8,27,13,41,2
-8,27,13,41,2
-8,27,17,42,3
-8,28,11,9,1
-8,28,11,9,1
-8,28,12,14,0
-8,28,12,14,1
-8,28,12,14,0
-8,28,15,3,2
-8,28,15,3,2
-8,28,16,31,3
-8,28,17,40,3
-8,29,14,44,3
-8,29,17,25,3
-8,30,12,5,0
-8,30,12,5,0
-8,30,12,5,0
-8,30,13,32,1
-8,30,13,32,1
-8,30,13,56,2
-8,30,14,23,2
-8,30,14,23,2
-8,30,14,23,2
-8,30,14,23,2
-8,30,14,41,2
-8,30,14,41,2
-8,30,14,41,2
-8,30,15,50,3
-8,30,17,0,3
-8,30,18,59,3
-8,30,18,59,3
-8,31,14,31,2
-8,31,14,31,2
-8,31,14,31,2
-8,31,17,59,3
-8,31,18,0,3
-9,1,16,13,3
-9,1,16,13,3
-9,1,16,13,3
-9,1,17,41,3
-9,2,13,44,1
-9,2,13,44,1
-9,2,13,44,1
-9,2,14,49,2
-9,2,14,49,2
-9,2,14,49,2
-9,2,16,6,3
-9,2,16,6,3
-9,2,17,2,3
-9,3,16,9,3
-9,3,17,35,3
-9,3,17,36,3
-9,4,12,57,1
-9,4,12,57,1
-9,4,15,8,3
-9,4,15,34,3
-9,4,16,26,3
-9,4,16,26,3
-9,4,18,37,3
-9,4,18,37,3
-9,4,18,37,3
-9,6,11,18,0
-9,6,11,18,0
-9,6,12,54,1
-9,6,12,54,1
-9,6,14,21,2
-9,6,14,21,2
-9,6,19,20,3
-9,7,11,50,0
-9,7,14,17,2
-9,7,14,57,3
-9,7,14,57,3
-9,7,16,56,3
-9,7,16,56,3
-9,7,16,56,3
-9,7,16,56,3
-9,7,18,38,3
-9,7,18,38,3
-9,8,11,4,2
-9,8,11,4,2
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,14,0
-9,8,11,14,1
-9,8,11,14,1
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,1
-9,8,12,36,0
-9,8,12,36,0
-9,8,12,36,0
-9,8,12,36,0
-9,8,12,36,0
-9,8,13,37,1
-9,8,13,37,1
-9,8,13,37,1
-9,8,14,20,2
-9,8,14,20,2
-9,8,18,20,3
-9,9,12,47,1
-9,9,12,47,2
-9,9,12,47,2
-9,9,19,5,3
-9,10,13,15,1
-9,10,13,15,1
-9,10,13,15,0
-9,10,16,49,3
-9,10,19,6,3
-9,10,21,5,3
-9,11,14,16,2
-9,11,14,16,2
-9,11,14,16,2
-9,11,18,41,3
-9,12,14,43,2
-9,12,14,43,2
-9,12,14,43,2
-9,12,16,14,3
-9,12,17,12,3
-9,12,17,12,2
-9,12,17,12,3
-9,12,17,12,2
-9,12,20,44,3
-9,13,19,52,3
-9,14,14,39,2
-9,14,14,39,2
-9,14,15,14,3
-9,14,17,29,3
-9,14,17,29,3
-9,14,17,29,3
-9,15,11,41,1
-9,15,11,41,1
-9,15,13,4,1
-9,15,14,3,1
-9,15,14,3,2
-9,16,12,36,1
-9,16,12,36,1
-9,16,12,36,1
-9,16,12,36,1
-9,16,12,48,1
-9,16,12,48,1
-9,16,13,51,1
-9,16,13,51,2
-9,16,13,51,1
-9,16,15,13,3
-9,16,15,14,3
-9,16,15,14,3
-9,17,10,27,0
-9,17,10,27,0
-9,17,11,10,0
-9,17,11,10,0
-9,17,11,10,0
-9,17,12,43,1
-9,17,12,43,1
-9,17,12,43,1
-9,17,13,32,1
-9,17,13,32,1
-9,17,14,5,1
-9,17,14,5,2
-9,17,14,6,2
-9,17,15,7,3
-9,17,15,49,3
-9,17,15,49,3
-9,17,18,12,3
-9,17,18,13,3
+7,20,12,13,2
+7,20,14,40,1
+7,20,14,40,2
+7,21,13,2,2
+7,21,13,2,2
+7,21,14,23,2
+7,21,14,23,2
+7,21,15,41,2
+7,21,16,54,2
+7,21,16,54,2
+7,21,17,45,3
+7,22,12,28,3
+7,22,15,35,2
+7,22,15,35,2
+7,22,18,59,3
+7,22,18,59,3
+7,23,12,32,2
+7,23,12,32,2
+7,23,16,7,2
+7,23,16,7,2
+7,23,16,7,2
+7,23,16,7,2
+7,23,16,7,2
+7,24,12,4,0
+7,24,12,4,0
+7,24,12,4,1
+7,24,14,38,2
+7,24,14,38,2
+7,24,18,54,3
+7,25,12,31,0
+7,25,12,32,1
+7,25,12,32,1
+7,25,15,6,3
+7,25,18,56,3
+7,26,13,41,2
+7,26,19,14,3
+7,27,11,39,2
+7,27,11,39,3
+7,27,11,46,3
+7,27,11,46,2
+7,27,13,8,2
+7,27,13,8,2
+7,27,13,9,2
+7,27,13,45,2
+7,27,13,45,2
+7,27,15,38,3
+7,28,12,12,2
+7,28,12,13,2
+7,28,12,41,2
+7,28,12,41,2
+7,28,12,41,2
+7,28,14,0,1
+7,28,14,0,2
+7,28,15,21,3
+7,28,18,56,3
+7,29,10,9,1
+7,29,10,9,1
+7,29,10,9,1
+7,29,11,54,0
+7,29,11,54,0
+7,29,11,54,0
+7,29,11,54,1
+7,29,14,10,2
+7,29,16,44,2
+7,29,16,44,2
+7,30,16,7,3
+7,30,18,45,3
+7,31,13,2,0
+7,31,13,2,1
+7,31,13,3,1
+7,31,13,3,1
+7,31,13,3,1
+7,31,18,39,3
+8,1,12,22,0
+8,1,12,22,1
+8,1,14,20,2
+8,1,14,20,2
+8,1,14,20,2
+8,1,15,55,3
+8,1,18,31,3
+8,1,18,37,3
+8,1,18,37,3
+8,1,19,2,3
+8,1,19,2,3
+8,1,20,5,3
+8,2,10,9,2
+8,2,10,9,1
+8,2,10,9,2
+8,2,10,9,2
+8,2,13,58,2
+8,2,13,58,2
+8,2,15,44,3
+8,2,15,44,3
+8,2,15,44,3
+8,2,17,21,3
+8,2,17,21,3
+8,2,17,21,3
+8,3,13,31,1
+8,3,13,31,2
+8,3,13,32,2
+8,3,16,43,3
+8,4,13,20,1
+8,4,13,20,2
+8,4,18,27,3
+8,5,13,37,2
+8,5,13,37,2
+8,5,18,33,3
+8,6,11,24,3
+8,6,11,24,3
+8,6,11,24,3
+8,6,13,50,3
+8,7,13,4,2
+8,7,13,4,2
+8,7,14,56,3
+8,8,12,13,2
+8,8,12,13,2
+8,8,15,51,2
+8,8,15,51,2
+8,8,15,51,3
+8,9,13,32,2
+8,9,13,32,2
+8,9,13,32,2
+8,9,15,8,2
+8,9,15,8,2
+8,9,15,8,2
+8,9,16,19,2
+8,10,11,32,0
+8,10,11,32,1
+8,10,11,32,1
+8,10,13,13,1
+8,10,13,13,1
+8,10,13,13,2
+8,10,16,42,3
+8,10,16,42,3
+8,11,14,6,2
+8,11,14,7,2
+8,11,18,54,3
+8,11,18,54,3
+8,11,18,54,3
+8,12,12,27,1
+8,12,12,27,1
+8,12,12,28,1
+8,12,13,53,2
+8,12,13,53,2
+8,12,13,53,2
+8,12,15,21,3
+8,13,13,16,1
+8,13,13,16,1
+8,13,13,16,1
+8,13,14,14,2
+8,13,14,14,2
+8,13,16,11,3
+8,13,17,18,3
+8,14,13,7,1
+8,14,13,7,1
+8,14,13,7,1
+8,14,13,7,1
+8,14,13,7,2
+8,14,13,7,2
+8,14,15,6,3
+8,15,14,5,2
+8,15,14,5,2
+8,15,14,6,2
+8,15,14,6,2
+8,15,16,41,3
+8,15,16,41,3
+8,15,17,30,3
+8,16,13,40,2
+8,16,13,40,2
+8,16,17,52,3
+8,16,17,53,3
+8,17,13,34,1
+8,17,13,35,2
+8,17,14,7,2
+8,17,19,2,3
+8,18,10,21,3
+8,18,11,14,2
+8,18,11,14,2
+8,18,11,14,2
+8,18,11,14,2
+8,18,14,25,2
+8,18,14,25,3
+8,18,14,25,2
+8,18,18,18,3
+8,18,18,19,3
+8,19,18,33,3
+8,19,18,33,3
+8,19,18,33,3
+8,19,18,33,3
+8,20,14,28,2
+8,20,14,28,2
+8,20,14,28,2
+8,20,14,28,2
+8,20,17,8,3
+8,20,18,22,3
+8,21,11,24,1
+8,21,11,24,1
+8,21,11,24,1
+8,21,15,34,3
+8,21,18,55,3
+8,22,12,3,1
+8,22,12,4,2
+8,22,12,4,2
+8,22,13,51,2
+8,22,13,51,2
+8,22,13,51,2
+8,22,18,12,3
+8,22,18,12,3
+8,22,18,12,3
+8,22,18,12,3
+8,22,18,40,3
+8,22,18,40,3
+8,23,13,42,1
+8,23,13,42,1
+8,23,17,32,3
+8,23,19,28,3
+8,23,20,27,3
+8,23,20,27,3
+8,23,21,49,3
+8,24,14,0,2
+8,24,14,0,2
+8,24,14,0,2
+8,24,14,0,2
+8,24,15,4,3
+8,24,15,4,3
+8,24,16,2,3
+8,24,16,3,3
+8,24,16,37,3
+8,24,17,9,3
+8,24,17,14,3
+8,25,13,34,1
+8,25,13,34,1
+8,25,13,34,1
+8,25,13,34,1
+8,25,13,34,1
+8,25,15,1,3
+8,25,17,58,3
+8,26,10,29,0
+8,26,10,29,0
+8,26,10,29,0
+8,26,10,29,0
+8,26,10,29,0
+8,26,16,42,3
+8,26,16,42,3
+8,26,18,41,3
+8,26,18,41,3
+8,27,13,41,2
+8,27,13,41,2
+8,27,13,41,2
+8,27,13,41,2
+8,27,17,42,3
+8,28,11,9,1
+8,28,11,9,1
+8,28,12,14,0
+8,28,12,14,1
+8,28,12,14,0
+8,28,15,3,2
+8,28,15,3,2
+8,28,16,31,3
+8,28,17,40,3
+8,29,14,44,3
+8,29,17,25,3
+8,30,12,5,0
+8,30,12,5,0
+8,30,12,5,0
+8,30,13,32,1
+8,30,13,32,1
+8,30,13,56,2
+8,30,14,23,2
+8,30,14,23,2
+8,30,14,23,2
+8,30,14,23,2
+8,30,14,41,2
+8,30,14,41,2
+8,30,14,41,2
+8,30,15,50,3
+8,30,17,0,3
+8,30,18,59,3
+8,30,18,59,3
+8,31,14,31,2
+8,31,14,31,2
+8,31,14,31,2
+8,31,17,59,3
+8,31,18,0,3
+9,1,16,13,3
+9,1,16,13,3
+9,1,16,13,3
+9,1,17,41,3
+9,2,13,44,1
+9,2,13,44,1
+9,2,13,44,1
+9,2,14,49,2
+9,2,14,49,2
+9,2,14,49,2
+9,2,16,6,3
+9,2,16,6,3
+9,2,17,2,3
+9,3,16,9,3
+9,3,17,35,3
+9,3,17,36,3
+9,4,12,57,1
+9,4,12,57,1
+9,4,15,8,3
+9,4,15,34,3
+9,4,16,26,3
+9,4,16,26,3
+9,4,18,37,3
+9,4,18,37,3
+9,4,18,37,3
+9,6,11,18,0
+9,6,11,18,0
+9,6,12,54,1
+9,6,12,54,1
+9,6,14,21,2
+9,6,14,21,2
+9,6,19,20,3
+9,7,11,50,0
+9,7,14,17,2
+9,7,14,57,3
+9,7,14,57,3
+9,7,16,56,3
+9,7,16,56,3
+9,7,16,56,3
+9,7,16,56,3
+9,7,18,38,3
+9,7,18,38,3
+9,8,11,4,2
+9,8,11,4,2
+9,8,11,13,0
+9,8,11,13,0
+9,8,11,13,0
+9,8,11,13,0
+9,8,11,13,0
+9,8,11,14,0
+9,8,11,14,1
+9,8,11,14,1
+9,8,12,1,0
+9,8,12,1,0
+9,8,12,1,0
+9,8,12,1,0
+9,8,12,1,0
+9,8,12,1,1
+9,8,12,36,0
+9,8,12,36,0
+9,8,12,36,0
+9,8,12,36,0
+9,8,12,36,0
+9,8,13,37,1
+9,8,13,37,1
+9,8,13,37,1
+9,8,14,20,2
+9,8,14,20,2
+9,8,18,20,3
+9,9,12,47,1
+9,9,12,47,2
+9,9,12,47,2
+9,9,19,5,3
+9,10,13,15,1
+9,10,13,15,1
+9,10,13,15,0
+9,10,16,49,3
+9,10,19,6,3
+9,10,21,5,3
+9,11,14,16,2
+9,11,14,16,2
+9,11,14,16,2
+9,11,18,41,3
+9,12,14,43,2
+9,12,14,43,2
+9,12,14,43,2
+9,12,16,14,3
+9,12,17,12,3
+9,12,17,12,2
+9,12,17,12,3
+9,12,17,12,2
+9,12,20,44,3
+9,13,19,52,3
+9,14,14,39,2
+9,14,14,39,2
+9,14,15,14,3
+9,14,17,29,3
+9,14,17,29,3
+9,14,17,29,3
+9,15,11,41,1
+9,15,11,41,1
+9,15,13,4,1
+9,15,14,3,1
+9,15,14,3,2
+9,16,12,36,1
+9,16,12,36,1
+9,16,12,36,1
+9,16,12,36,1
+9,16,12,48,1
+9,16,12,48,1
+9,16,13,51,1
+9,16,13,51,2
+9,16,13,51,1
+9,16,15,13,3
+9,16,15,14,3
+9,16,15,14,3
+9,17,10,27,0
+9,17,10,27,0
+9,17,11,10,0
+9,17,11,10,0
+9,17,11,10,0
+9,17,12,43,1
+9,17,12,43,1
+9,17,12,43,1
+9,17,13,32,1
+9,17,13,32,1
+9,17,14,5,1
+9,17,14,5,2
+9,17,14,6,2
+9,17,15,7,3
+9,17,15,49,3
+9,17,15,49,3
+9,17,18,12,3
+9,17,18,13,3
diff --git a/feedbackloop.learner/src/main/resources/log4j2.xml b/feedbackloop.learner/src/main/resources/log4j2.xml
index 481178a66aaaf356ef87241bf07871f4e808c3ee..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.learner/src/main/resources/log4j2.xml
+++ b/feedbackloop.learner/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
@@ -19,4 +19,4 @@
             <AppenderRef ref="RollingFile"/>
         </Root>
     </Loggers>
-</Configuration>
\ No newline at end of file
+</Configuration>
diff --git a/feedbackloop.main/build.gradle b/feedbackloop.main/build.gradle
index 2287b7b94c161ad8a3268ca9d751e1c1e2137e88..c5a11eda0213af9b519a5c8df62de30e8dc6b059 100644
--- a/feedbackloop.main/build.gradle
+++ b/feedbackloop.main/build.gradle
@@ -14,11 +14,6 @@ dependencies {
     compile project(':feedbackloop.analyze')
     compile project(':feedbackloop.plan')
     compile project(':feedbackloop.execute')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 run {
diff --git a/feedbackloop.main/src/main/resources/log4j2.xml b/feedbackloop.main/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.main/src/main/resources/log4j2.xml
+++ b/feedbackloop.main/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/feedbackloop.monitor/build.gradle b/feedbackloop.monitor/build.gradle
index 77b9513097ddb84f3d1e003489bade769e38c4a7..6b73819fad70219d9bb7465e0b3c903352bc76d7 100644
--- a/feedbackloop.monitor/build.gradle
+++ b/feedbackloop.monitor/build.gradle
@@ -10,11 +10,6 @@ apply plugin: 'application'
 dependencies {
     compile project(':eraser-base')
     compile project(':feedbackloop.api')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 run {
diff --git a/feedbackloop.monitor/src/main/resources/log4j2.xml b/feedbackloop.monitor/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.monitor/src/main/resources/log4j2.xml
+++ b/feedbackloop.monitor/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/feedbackloop.plan/build.gradle b/feedbackloop.plan/build.gradle
index 2a9ac05f8f3beb83fab66354c598d27ca84bb4b7..81156210881de5442bc7d1f79e3dcefdf51c042a 100644
--- a/feedbackloop.plan/build.gradle
+++ b/feedbackloop.plan/build.gradle
@@ -10,11 +10,6 @@ apply plugin: 'application'
 dependencies {
     compile project(':eraser-base')
     compile project(':feedbackloop.api')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 run {
diff --git a/feedbackloop.plan/src/main/resources/log4j2.xml b/feedbackloop.plan/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/feedbackloop.plan/src/main/resources/log4j2.xml
+++ b/feedbackloop.plan/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/influx_test/build.gradle b/influx_test/build.gradle
index fcb387f503c0960fee15af9ffbc5913d536bcb07..ead9dcee7f559d5c2ae9a472c8c09096692af3b2 100644
--- a/influx_test/build.gradle
+++ b/influx_test/build.gradle
@@ -9,13 +9,7 @@ apply plugin: 'application'
 
 dependencies {
     compile project(':eraser-base')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    compile 'org.influxdb:influxdb-java:2.14'
-
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
+    compile group: 'org.influxdb', name: 'influxdb-java', version: '2.15'
 }
 
 run {
diff --git a/influx_test/src/main/resources/log4j2.xml b/influx_test/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/influx_test/src/main/resources/log4j2.xml
+++ b/influx_test/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/integration/build.gradle b/integration/build.gradle
index 47be63191b320dce8ea7c0673b15943a1496b634..584eb25249ba3c44c928d77ac7a9368d0890daad 100644
--- a/integration/build.gradle
+++ b/integration/build.gradle
@@ -19,7 +19,4 @@ run {
 dependencies {
     compile project(':eraser-base')
     compile project(':openhab-mock')
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
 }
diff --git a/integration/src/main/java/de/tudresden/inf/st/eraser/integration/IntegrationMain.java b/integration/src/main/java/de/tudresden/inf/st/eraser/integration/IntegrationMain.java
index a7ca2904072ea46e0a0243fd7bc47328729a55c4..3418238f3572595c4a9640369767a1d34d037920 100644
--- a/integration/src/main/java/de/tudresden/inf/st/eraser/integration/IntegrationMain.java
+++ b/integration/src/main/java/de/tudresden/inf/st/eraser/integration/IntegrationMain.java
@@ -79,17 +79,14 @@ public class IntegrationMain {
       logger.info("Start!");
       Root model = Main.importFromFile();
 //      Root model = importFromLocalFile();
-      logger.debug("Got model: {}", model.description());
+      logger.debug("Got model: {}", model.getOpenHAB2Model().description());
       MqttRoot mqttRoot = new MqttRoot();
       mqttRoot.setHostByName("localhost");
       mqttRoot.setIncomingPrefix("oh2/out/");
-      MqttTopic irisTopic = new MqttTopic();
-      irisTopic.setPart("iris1_item");
       MqttTopic irisStateTopic = new MqttTopic();
-      irisStateTopic.setPart("state");
-      irisTopic.addSubTopic(irisStateTopic);
+      irisStateTopic.setTopicString("iris1_item/state");
       Item iris = null;
-      for (Item item : model.items()) {
+      for (Item item : model.getOpenHAB2Model().items()) {
         if (item.getID().equals("iris1_item")) {
           iris = item;
           break;
@@ -99,8 +96,7 @@ public class IntegrationMain {
         logger.error("Could not find iris1. Exiting");
         return;
       }
-      irisStateTopic.setItem(iris);
-      mqttRoot.addTopic(irisTopic);
+      irisStateTopic.addItem(iris);
       model.setMqttRoot(mqttRoot);
 //      JsonSerializer.write(model, "src/main/resources/openhab2-data.json");
       JsonSerializer.write(model, "openhab2-data.json");
diff --git a/ml_test/build.gradle b/ml_test/build.gradle
index 241f853811b83bf6161c9e98efc0a22ec1ba64f2..2d500200789d52e56de89a77d453c6af7ae6d49b 100644
--- a/ml_test/build.gradle
+++ b/ml_test/build.gradle
@@ -9,11 +9,6 @@ apply plugin: 'application'
 
 dependencies {
     compile project(':eraser-base')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 run {
diff --git a/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java b/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java
index d17b6c97dc647e9faebfb1fd1ca9b4450fef4725..59c156853782b8e58010d24393e7c14c73ffefb8 100644
--- a/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java
+++ b/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java
@@ -23,15 +23,15 @@ public class Main {
   }
 
   private static Root createModel() {
-    Root model = Root.createEmptyRoot();
+    Root root = Root.createEmptyRoot();
     Group group = new Group();
     group.setID("Group1");
-    model.addGroup(group);
+    root.getOpenHAB2Model().addGroup(group);
     Item activityItem = newItem("activity", "Recognized activity", false, 8);
     Item brightnessItem = newItem("brightness", "Measured brightness", false, 5);
     group.addItem(activityItem);
     group.addItem(brightnessItem);
-    return model;
+    return root;
   }
 
   private static NumberItem newItem(String id, String label, boolean defaultSendState, int initialState) {
@@ -70,7 +70,7 @@ public class Main {
       }
     }
     logger.info("Classification results: {}", results);
-    logger.info("Took {}ms", String.join("ms, ", times.stream().map(l -> Long.toString(l)).collect(Collectors.toList())));
+    logger.info("Took {}ms", times.stream().map(l -> Long.toString(l)).collect(Collectors.joining("ms, ")));
     logger.info("Took on average: {}ms",
         Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).average().orElse(-1));
     logger.info("Took on median: {}ms",
@@ -78,30 +78,36 @@ public class Main {
             .skip((REPETITIONS-1)/2).limit(2-REPETITIONS%2).average().orElse(Double.NaN));
   }
 
-  /**
-   * Purpose: Create a neural network with 3 layers (2 + 10 + 1 neurons)
-   * Sigmoid function for all layers, combinator of output is identity function
-   */
-  private static void createAndTestBrightnessNetwork() {
-    /*
-   	- Helligkeit NN:
-      - arbeitet momentan mit Zonen und nicht mit einzelnen Lampen
-      - 3 Layers
-      - Input Layer hat Neuronen (Aktivitätsnummer, Wert vom Helligkeitssensor)
-      - Hidden Layer hat 10 Neuronen
-      - Output Layer hat 1 Neuron ( Helligkeitswert)
-      - Aktivierungsfunktion: Sigmoidfunktion <- selbe für alle Layers
-     */
-    Root model = createModel();
-    Item activityItem = model.resolveItem("activity").orElseThrow(
+  private static class PreparationResult {
+    OutputLayer outputLayer;
+    DoubleArrayDoubleFunction sigmoid;
+    InputNeuron activity;
+    InputNeuron brightness;
+    NeuralNetworkRoot nn;
+    HiddenNeuron[] hiddenNeurons;
+
+    PreparationResult(OutputLayer outputLayer, DoubleArrayDoubleFunction sigmoid, InputNeuron activity,
+                      InputNeuron brightness, NeuralNetworkRoot nn, HiddenNeuron[] hiddenNeurons) {
+      this.outputLayer = outputLayer;
+      this.sigmoid = sigmoid;
+      this.activity = activity;
+      this.brightness = brightness;
+      this.nn = nn;
+      this.hiddenNeurons = hiddenNeurons;
+    }
+  }
+
+  private static PreparationResult prepareNetwork() {
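+    // Shared setup for both test networks: a model with activity and brightness items, two input neurons, an (initially empty) output layer, and ten hidden neurons connected to the inputs.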
+    Root root = createModel();
+    Item activityItem = root.getOpenHAB2Model().resolveItem("activity").orElseThrow(
         () -> new RuntimeException("Activity not found"));
-    Item brightnessItem = model.resolveItem("brightness").orElseThrow(
+    Item brightnessItem = root.getOpenHAB2Model().resolveItem("brightness").orElseThrow(
         () -> new RuntimeException("Brightness not found"));
     NeuralNetworkRoot nn = new NeuralNetworkRoot();
 
     DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
 
-    // input layer
+    // input layer (2 neurons)
     InputNeuron activity = new InputNeuron();
     activity.setItem(activityItem);
     InputNeuron brightness = new InputNeuron();
@@ -109,31 +115,50 @@ public class Main {
     nn.addInputNeuron(activity);
     nn.addInputNeuron(brightness);
 
-    // output layer
     OutputLayer outputLayer = new OutputLayer();
-    OutputNeuron output = new OutputNeuron();
-    output.setLabel("Brightness_Output");
-    output.setActivationFormula(sigmoid);
-    outputLayer.addOutputNeuron(output);
-    // we just have one output neuron, thus use IdentityFunction
-    outputLayer.setCombinator(inputs -> inputs[0]);
     nn.setOutputLayer(outputLayer);
 
-    // hidden layer
+    // hidden layer (10 neurons)
     HiddenNeuron[] hiddenNeurons = new HiddenNeuron[10];
-    for (int i = 0; i < hiddenNeurons.length; i++) {
+    for (int hiddenIndex = 0; hiddenIndex < hiddenNeurons.length; hiddenIndex++) {
       HiddenNeuron hiddenNeuron = new HiddenNeuron();
       hiddenNeuron.setActivationFormula(sigmoid);
-      hiddenNeurons[i] = hiddenNeuron;
       nn.addHiddenNeuron(hiddenNeuron);
       activity.connectTo(hiddenNeuron, 1.0/2.0);
       brightness.connectTo(hiddenNeuron, 1.0/2.0);
-      hiddenNeuron.connectTo(output, 1.0/hiddenNeurons.length);
     }
+    root.getMachineLearningRoot().setPreferenceLearning(nn);
 
-    model.getMachineLearningRoot().setPreferenceLearning(nn);
+    return new PreparationResult(outputLayer, sigmoid, activity, brightness, nn, hiddenNeurons);
+  }
 
-    classifyTimed(nn, NeuralNetworkRoot::classify,
+  /**
+   * Purpose: Create a neural network with 3 layers (2 + 10 + 1 neurons)
+   * Sigmoid function for all layers, combinator of output is identity function
+   */
+  private static void createAndTestBrightnessNetwork() {
+    /*
+   	- Helligkeit NN:
+      - arbeitet momentan mit Zonen und nicht mit einzelnen Lampen
+      - 3 Layers
+      - Input Layer hat Neuronen (Aktivitätsnummer, Wert vom Helligkeitssensor)
+      - Hidden Layer hat 10 Neuronen
+      - Output Layer hat 1 Neuron ( Helligkeitswert)
+      - Aktivierungsfunktion: Sigmoidfunktion <- selbe für alle Layers
+     */
+    PreparationResult pr = prepareNetwork();
+    OutputNeuron output = new OutputNeuron();
+    output.setLabel("Brightness_Output");
+    output.setActivationFormula(pr.sigmoid);
+    pr.outputLayer.addOutputNeuron(output);
+    // we just have one output neuron, thus use IdentityFunction
+    pr.outputLayer.setCombinator(inputs -> inputs[0]);
+
+    for (HiddenNeuron hiddenNeuron : pr.hiddenNeurons) {
+      hiddenNeuron.connectTo(output, 1.0/pr.hiddenNeurons.length);
+    }
+
+    classifyTimed(pr.nn, NeuralNetworkRoot::classify,
         classification -> Double.toString(classification.number));
   }
 
@@ -142,52 +167,23 @@ public class Main {
    * Sigmoid function for all layers, combinator creates RGB value in hex form
    */
   private static void createAndTestColorNetwork() {
-    Root model = createModel();
-    Item activityItem = model.resolveItem("activity").orElseThrow(
-        () -> new RuntimeException("Activity not found"));
-    Item brightnessItem = model.resolveItem("brightness").orElseThrow(
-        () -> new RuntimeException("Brightness not found"));
-    NeuralNetworkRoot nn = new NeuralNetworkRoot();
-
-    DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
-
-    // input layer (2 neurons)
-    InputNeuron activity = new InputNeuron();
-    activity.setItem(activityItem);
-    InputNeuron brightness = new InputNeuron();
-    brightness.setItem(brightnessItem);
-    nn.addInputNeuron(activity);
-    nn.addInputNeuron(brightness);
-
-    // output layer (3 neurons)
-    OutputLayer outputLayer = new OutputLayer();
+    PreparationResult pr = prepareNetwork();
     for (int i = 0; i < 3; i++) {
       OutputNeuron output = new OutputNeuron();
       output.setLabel("Brightness_Output_" + i);
       output.setActivationFormula(inputs -> Arrays.stream(inputs).sum());
-      outputLayer.addOutputNeuron(output);
+      pr.outputLayer.addOutputNeuron(output);
     }
     // we have three output neurons, combine them to a double value (representing RGB)
-    outputLayer.setCombinator(inputs -> 65536 * Math.ceil(255.0 * inputs[0]) + 256 * Math.ceil(255.0 * inputs[1]) + Math.ceil(255.0 * inputs[0]));
-    nn.setOutputLayer(outputLayer);
+    pr.outputLayer.setCombinator(inputs -> 65536 * Math.ceil(255.0 * inputs[0]) + 256 * Math.ceil(255.0 * inputs[1]) + Math.ceil(255.0 * inputs[2]));
 
-    // hidden layer (10 neurons)
-    HiddenNeuron[] hiddenNeurons = new HiddenNeuron[10];
-    for (int hiddenIndex = 0; hiddenIndex < hiddenNeurons.length; hiddenIndex++) {
-      HiddenNeuron hiddenNeuron = new HiddenNeuron();
-      hiddenNeuron.setActivationFormula(sigmoid);
-      hiddenNeurons[hiddenIndex] = hiddenNeuron;
-      nn.addHiddenNeuron(hiddenNeuron);
-      activity.connectTo(hiddenNeuron, 1.0/2.0);
-      brightness.connectTo(hiddenNeuron, 1.0/2.0);
-      for (int outputIndex = 0; outputIndex < outputLayer.getNumOutputNeuron(); outputIndex++) {
-        hiddenNeuron.connectTo(outputLayer.getOutputNeuron(outputIndex), random.nextDouble() * 1.0/hiddenNeurons.length);
+    for (HiddenNeuron hiddenNeuron : pr.hiddenNeurons) {
+      for (int outputIndex = 0; outputIndex < pr.outputLayer.getNumOutputNeuron(); outputIndex++) {
+        hiddenNeuron.connectTo(pr.outputLayer.getOutputNeuron(outputIndex), random.nextDouble() * 1.0/pr.hiddenNeurons.length);
       }
     }
 
-    model.getMachineLearningRoot().setPreferenceLearning(nn);
-
-    classifyTimed(nn, NeuralNetworkRoot::classify,
+    classifyTimed(pr.nn, NeuralNetworkRoot::classify,
         classification -> Double.toHexString(classification.number));
 
 //    long before = System.nanoTime();
diff --git a/ml_test/src/main/resources/log4j2.xml b/ml_test/src/main/resources/log4j2.xml
index a5132e32502bf95c14ac0e6e50c2185325643ca7..686c2a889038bd7e7d89928939edfd09a5f15a94 100644
--- a/ml_test/src/main/resources/log4j2.xml
+++ b/ml_test/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/openhab-mock/build.gradle b/openhab-mock/build.gradle
index 94017c3d7308df49f404a1c49203b807fa4a5ef7..4de21c3b36c9d837c46aaf3245c86ad2b717d91a 100644
--- a/openhab-mock/build.gradle
+++ b/openhab-mock/build.gradle
@@ -19,9 +19,5 @@ run {
 dependencies {
     compile project(':eraser-base')
     compile project(':commons.color')
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    compile 'com.opencsv:opencsv:3.8'
-    compile 'org.apache.commons:commons-math3:3.6.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
+    compile group: 'com.opencsv', name: 'opencsv', version: '3.8'
 }
diff --git a/openhab-mock/src/main/resources/log4j2.xml b/openhab-mock/src/main/resources/log4j2.xml
index 89799a2f09ba34d288e610d960b3ed6348213105..18175a02521156259c8789745fb849fa893302e9 100644
--- a/openhab-mock/src/main/resources/log4j2.xml
+++ b/openhab-mock/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/org.openhab.action.machinelearn/build.gradle b/org.openhab.action.machinelearn/build.gradle
index 12d6d1e875fee2da8bb334ebcf14e95d9e3c66af..0ca194823aeeff40aa1de7f6da6c61896ff63e5b 100644
--- a/org.openhab.action.machinelearn/build.gradle
+++ b/org.openhab.action.machinelearn/build.gradle
@@ -5,31 +5,16 @@ repositories {
 sourceCompatibility = 1.8
 
 apply plugin: 'java'
-//apply plugin: 'application'
 
 dependencies {
     compile files('lib/weka.jar')
     compile project(':stub.org.openhab.core.scriptengine.action')
-//    compile 'org.apache.commons:commons-lang3:3.8.1'
-//    compile group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.annotation', version: '2.0.0'
-//    compile 'org.openhab.core.library.types'
-//    compile 'org.openhab.core.scriptengine.action'
     compile group: 'org.osgi', name: 'org.osgi.framework', version: '1.9.0'
     compile group: 'org.osgi', name: 'org.osgi.service.cm', version: '1.6.0'
     compile group: 'org.osgi', name: 'org.osgi.service.component', version: '1.4.0'
     compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
-//run {
-//    mainClassName = 'de.tudresden.inf.st.eraser.skywriter_hue_integration.Main'
-//    standardInput = System.in
-//    if (project.hasProperty("appArgs")) {
-//        args Eval.me(appArgs)
-//    }
-//}
-
 sourceSets {
     main {
         java {
diff --git a/org.openlicht.action.reinforcementlearning/build.gradle b/org.openlicht.action.reinforcementlearning/build.gradle
index 54e1daf916b41766a9c4dc961e365040744ff608..66417e5da744784dfcb590357db19eaa1ce14bd6 100644
--- a/org.openlicht.action.reinforcementlearning/build.gradle
+++ b/org.openlicht.action.reinforcementlearning/build.gradle
@@ -10,16 +10,11 @@ apply plugin: 'java'
 dependencies {
     compile files('lib/encog-core-3.4.jar')
     compile project(':stub.org.openhab.core.scriptengine.action')
-    compile 'org.apache.commons:commons-lang3:3.8.1'
     compile group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.annotation', version: '2.2.200'
-//    compile 'org.openhab.core.library.types'
-//    compile 'org.openhab.core.scriptengine.action'
     compile group: 'org.osgi', name: 'org.osgi.framework', version: '1.9.0'
     compile group: 'org.osgi', name: 'org.osgi.service.cm', version: '1.6.0'
     compile group: 'org.osgi', name: 'org.osgi.service.component', version: '1.4.0'
     compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 sourceSets {
diff --git a/project-template/build.gradle b/project-template/build.gradle
index 9cb094bf6e4b76641f50f823f9f526b8c8ba7657..09e63423457a9994046774fcc0d5bf657b04db01 100644
--- a/project-template/build.gradle
+++ b/project-template/build.gradle
@@ -10,8 +10,8 @@ apply plugin: 'application'
 dependencies {
     compile project(':eraser-base')
     compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
+    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.2'
+    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.2'
     testCompile group: 'junit', name: 'junit', version: '4.12'
     testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
diff --git a/project-template/src/main/resources/log4j2.xml b/project-template/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/project-template/src/main/resources/log4j2.xml
+++ b/project-template/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/skywriter-hue-integration/build.gradle b/skywriter-hue-integration/build.gradle
index ab64e3d71e5b0b3c4e5939b2ffa387718e05a242..37bb2da071e8fe7cce1f769168cf33cddced3e32 100644
--- a/skywriter-hue-integration/build.gradle
+++ b/skywriter-hue-integration/build.gradle
@@ -10,11 +10,6 @@ apply plugin: 'application'
 dependencies {
     compile project(':eraser-base')
     compile project(':commons.color')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
 }
 
 run {
diff --git a/skywriter-hue-integration/src/main/java/de/tudresden/inf/st/eraser/skywriter_hue_integration/Main.java b/skywriter-hue-integration/src/main/java/de/tudresden/inf/st/eraser/skywriter_hue_integration/Main.java
index f6769cb625d1b914f54f6f377500166e61c1f9a9..3f64c8cdd3c530d8a287a0d3ce0d140f449c5921 100644
--- a/skywriter-hue-integration/src/main/java/de/tudresden/inf/st/eraser/skywriter_hue_integration/Main.java
+++ b/skywriter-hue-integration/src/main/java/de/tudresden/inf/st/eraser/skywriter_hue_integration/Main.java
@@ -3,10 +3,7 @@ package de.tudresden.inf.st.eraser.skywriter_hue_integration;
 import beaver.Parser;
 import de.tudresden.inf.st.eraser.commons.color.ColorUtils;
 import de.tudresden.inf.st.eraser.commons.color.ColorUtils.RGBvalues;
-import de.tudresden.inf.st.eraser.jastadd.model.Item;
-import de.tudresden.inf.st.eraser.jastadd.model.Root;
-import de.tudresden.inf.st.eraser.jastadd.model.Rule;
-import de.tudresden.inf.st.eraser.jastadd.model.SetStateFromItemsAction;
+import de.tudresden.inf.st.eraser.jastadd.model.*;
 import de.tudresden.inf.st.eraser.openhab2.mqtt.MQTTUpdater;
 import de.tudresden.inf.st.eraser.util.ParserUtils;
 import org.apache.logging.log4j.LogManager;
@@ -39,7 +36,8 @@ public class Main {
   @SuppressWarnings("ResultOfMethodCallIgnored")
   public static void main(String[] args) throws IOException, Parser.Exception {
     // use openHAB-eraser-connection to update hue (automatically done)
-    Root model = ParserUtils.load("skywriter-hue.eraser", Main.class);
+    Root root = ParserUtils.load("skywriter-hue.eraser", Main.class);
+    OpenHAB2Model model = root.getOpenHAB2Model();
     Item irisItem = model.resolveItem("iris1_item").orElseThrow(() ->
         new NoSuchElementException("Iris1_item not found"));
     Item skywriter1_x = model.resolveItem("skywriter1_x").orElseThrow(() ->
@@ -48,10 +46,10 @@ public class Main {
         new NoSuchElementException("Skywriter1 y not found"));
     Item skywriter1_xyz = model.resolveItem("skywriter1_xyz").orElseThrow(() ->
         new NoSuchElementException("Skywriter1 xyz not found"));
-    System.out.println(model.prettyPrint());
+    System.out.println(root.prettyPrint());
     // define rule to switch color, based on xyz-to-rgb-to-hsb mapping
     Rule mapXYtoIrisState = new Rule();
-    model.addRule(mapXYtoIrisState);
+    root.addRule(mapXYtoIrisState);
     mapXYtoIrisState.activateFor(skywriter1_xyz);
 //    mapXYtoIrisState.addEventFor(skywriter1_y, "y changed");
     SetStateFromItemsAction action = new SetStateFromItemsAction();
@@ -64,7 +62,7 @@ public class Main {
     Lock abortLock = new ReentrantLock();
     Condition abortCondition = abortLock.newCondition();
     Thread readFromOpenHABThread = new Thread(() -> {
-      try (MQTTUpdater updater = new MQTTUpdater(model)) {
+      try (MQTTUpdater updater = new MQTTUpdater(root)) {
         updater.start();
         if (!updater.waitUntilReady(3, TimeUnit.SECONDS)) {
           logger.error("openHAB reader not ready. Aborting.");
@@ -84,7 +82,7 @@ public class Main {
       }
     });
     if (USE_READ_FROM_OPENHAB || SEND_TO_OPENHAB) {
-      model.getMqttRoot().setHostByName(MQTT_HOST);
+      root.getMqttRoot().setHostByName(MQTT_HOST);
     }
     if (USE_READ_FROM_OPENHAB) {
       readFromOpenHABThread.start();
@@ -124,7 +122,7 @@ public class Main {
     }
 
     // wait for user to press enter
-    System.out.println("Press [Enter] to quit updating the model.");
+    System.out.println("Press [Enter] to quit updating the root.");
     System.in.read();
 
     // and then cancel the threads
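
The change above splits what used to be a single parse result into the Root and the OpenHAB2Model it contains: items are now resolved on the model obtained via getOpenHAB2Model(), while rules, pretty printing, and the MQTT configuration stay on the root. A minimal sketch of the resulting access pattern at a hypothetical call site (class, file, and item names below are placeholders, not part of this patch):

    // Hypothetical call site illustrating the new Root/OpenHAB2Model split.
    // File name, class name, and item name are placeholders.
    import beaver.Parser;
    import de.tudresden.inf.st.eraser.jastadd.model.Item;
    import de.tudresden.inf.st.eraser.jastadd.model.OpenHAB2Model;
    import de.tudresden.inf.st.eraser.jastadd.model.Root;
    import de.tudresden.inf.st.eraser.util.ParserUtils;

    import java.io.IOException;
    import java.util.NoSuchElementException;

    public class ExampleCallSite {
      public static void main(String[] args) throws IOException, Parser.Exception {
        Root root = ParserUtils.load("example.eraser", ExampleCallSite.class);
        OpenHAB2Model model = root.getOpenHAB2Model();
        // Item lookup happens on the contained model ...
        Item lamp = model.resolveItem("lamp1_item").orElseThrow(() ->
            new NoSuchElementException("lamp1_item not found"));
        // ... while printing the whole tree (and adding rules) goes through the root.
        System.out.println(root.prettyPrint());
        System.out.println(lamp);
      }
    }
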
diff --git a/skywriter-hue-integration/src/main/resources/log4j2.xml b/skywriter-hue-integration/src/main/resources/log4j2.xml
index 5c534092d64e9c1834c2ba20208c057e2b56be16..5d1091ea995c881e5985a0cfc925a952ff50d0bc 100644
--- a/skywriter-hue-integration/src/main/resources/log4j2.xml
+++ b/skywriter-hue-integration/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>
diff --git a/stub.org.openhab.core.scriptengine.action/build.gradle b/stub.org.openhab.core.scriptengine.action/build.gradle
index e2b54c7a70609c315c5b529678bdfb6cc2d8b461..2f5043786a9064ac0c84ffe10ab32a6d01940398 100644
--- a/stub.org.openhab.core.scriptengine.action/build.gradle
+++ b/stub.org.openhab.core.scriptengine.action/build.gradle
@@ -6,13 +6,6 @@ sourceCompatibility = 1.8
 
 apply plugin: 'java'
 
-dependencies {
-    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
-    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
-    testCompile group: 'junit', name: 'junit', version: '4.12'
-    testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
-}
-
 sourceSets {
     main {
         java {
diff --git a/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml b/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml
index 0594576fac98ba859e411597c90c8e3d989378bd..867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c 100644
--- a/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml
+++ b/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml
@@ -4,8 +4,8 @@
         <Console name="Console">
             <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
         </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
+        <RollingFile name="RollingFile" fileName="logs/eraser.log"
+                    filePattern="logs/eraser-%i.log">
             <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
             <Policies>
                 <OnStartupTriggeringPolicy/>