diff --git a/eraser.rest/.gitignore b/eraser.rest/.gitignore
deleted file mode 100644
index 70b583e34c3316bcd77c807e2d6b85db5e7d49f6..0000000000000000000000000000000000000000
--- a/eraser.rest/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/build/
-/bin/
-logs/
diff --git a/eraser.rest/build.gradle b/eraser.rest/build.gradle
deleted file mode 100644
index 0ac26261047eacbcc2b97709e7a5b4c1bc927b3b..0000000000000000000000000000000000000000
--- a/eraser.rest/build.gradle
+++ /dev/null
@@ -1,28 +0,0 @@
-buildscript {
-    dependencies {
-        classpath("org.springframework.boot:spring-boot-gradle-plugin:2.1.2.RELEASE")
-    }
-}
-
-plugins {
-    id 'io.franzbecker.gradle-lombok' version '1.14'
-}
-
-apply plugin: 'org.springframework.boot'
-apply plugin: 'io.spring.dependency-management'
-
-dependencies {
-    compile project(':eraser-base')
-    compile 'org.springframework.boot:spring-boot-starter-web'
-    compile group: 'io.springfox', name: 'springfox-swagger2', version: '2.9.2'
-    compile group: 'io.springfox', name: 'springfox-swagger-ui', version: '2.9.2'
-    testCompile 'org.springframework.boot:spring-boot-starter-test'
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Activity.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Activity.java
deleted file mode 100644
index 56da46d31f83e86c1bcd39446305d9588232bd1e..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Activity.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.ApiModelProperty;
-import lombok.Data;
-
-/**
- * A recognized activity resource.
- *
- * @author rschoene - Initial contribution
- */
-@Data(staticConstructor = "of")
-public class Activity {
-  @ApiModelProperty(notes = "Some identifier of this activity")
-  public final int identifier;
-  @ApiModelProperty(notes = "Name of the activity")
-  public final String description;
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ActivityController.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ActivityController.java
deleted file mode 100644
index 0766b2bb963a0fc0dd66a61f4937dcd844fc3e04..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ActivityController.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Spring Boot controller specifying the routes of the REST API for activities.
- *
- * @author rschoene - Initial contribution
- */
-@RestController
-@RequestMapping("/activity")
-@Api(value = "activity-data", description = "Activity data")
-public class ActivityController {
-
-  private List<Activity> dummyListOfActivities = new ArrayList<>();
-  private Map<Integer, Activity> dummyMapOfActivities = new HashMap<>();
-  private int currentActivityIndex = 0;
-
-  public ActivityController() {
-    add(Activity.of(1, "Sitting in armchair"));
-    add(Activity.of(2, "Going to sleep"));
-    add(Activity.of(3, "Entering house"));
-  }
-
-  public void add(Activity activity) {
-    dummyListOfActivities.add(activity);
-    dummyMapOfActivities.put(activity.getIdentifier(), activity);
-  }
-
-  @ApiOperation(value = "Get all events in long form", response = List.class)
-  @GetMapping(value = "", produces = "application/json")
-  public List<Activity> getAllActivities() {
-    return dummyListOfActivities;
-  }
-
-  @ApiOperation(value = "Get detailed information of one event", response = Activity.class)
-  @GetMapping(value = "/current", produces = "application/json")
-  public Activity getCurrentActivity() {
-    return dummyListOfActivities.get(currentActivityIndex);
-  }
-
-  @ApiOperation(value = "Get detailed information of one event", response = Activity.class)
-  @GetMapping(value = "/{identifier}", produces = "application/json")
-  //@RequestParam(value = "identifier")
-  public Activity getActivity(@PathVariable int identifier) {
-    return dummyMapOfActivities.get(identifier);
-  }
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Application.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Application.java
deleted file mode 100644
index 66ca9dc3e5166cf98384d0a5b7149b9bad6b4553..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Application.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-
-/**
- * The main class to start the REST service.
- *
- * @author rschoene - Initial contribution
- */
-@SpringBootApplication
-public class Application {
-
-  public static void main(String[] args) {
-    SpringApplication.run(Application.class, args);
-  }
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ChangedItem.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ChangedItem.java
deleted file mode 100644
index 09f06872fe6010f7b1e98a49139ee2a3097c6a9e..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ChangedItem.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.ApiModelProperty;
-import lombok.Data;
-
-/**
- * One changed item and its new state.
- *
- * @author rschoene - Initial contribution
- */
-@Data
-public class ChangedItem {
-  @ApiModelProperty(notes = "The name of the changed item")
-  public final String name;
-  @ApiModelProperty(notes = "The new state of the item")
-  public final Object state;
-  @ApiModelProperty(notes = "The label of the changed item")
-  public final String label;
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Event.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Event.java
deleted file mode 100644
index dda21e0c73d85ad386ba4c7d375e0335bfd8d3d2..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/Event.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.ApiModelProperty;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-
-import java.util.List;
-
-/**
- * Some system change event.
- *
- * @author rschoene - Initial contribution
- */
-@Data
-@AllArgsConstructor
-public abstract class Event {
-  @ApiModelProperty(notes = "Time when this event happened")
-  public final long timestamp;
-  @ApiModelProperty(notes = "Some identifier for the event")
-  public final int identifier;
-  @ApiModelProperty(notes = "A list of items changed due to this event")
-  public final List<ChangedItem> changedItems;
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/EventController.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/EventController.java
deleted file mode 100644
index 5b6407998969b53f0a57c6de73986bb7fedd4903..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/EventController.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-import java.util.*;
-
-/**
- * Spring Boot controller specifying the routes of the REST API for events.
- *
- * @author rschoene - Initial contribution
- */
-@RestController("events")
-@RequestMapping("/event")
-@Api(value = "event-data", description = "Recent events (recognitions or manual changes)")
-public class EventController {
-
-  private List<Event> dummyListOfEvents = new ArrayList<>();
-  private Map<Integer, Event> dummyMapOfEvents = new HashMap<>();
-
-  public EventController() {
-    add(new RecognitionEvent(1547637740, 1, Arrays.asList(
-        new ChangedItem("iris1", "green", "Hue Iris 1"),
-        new ChangedItem("go1", "green", "Hue Go 1")), 1, "Sitting in armchair"));
-    add(new RecognitionEvent(1547637750, 2, Collections.emptyList(),
-        1, "Sitting in armchair"));
-    add(new RecognitionEvent(1547623460, 4, Collections.singletonList(
-        new ChangedItem("go2", "off", "Hue Go 2")), 1, "Going to sleep"));
-    add(new ManualChangeEvent(1501146256, 5, Arrays.asList(
-        new ChangedItem("iris1", "green", "Hue Iris 1"),
-        new ChangedItem("go1", "red", "Hue Go 1"),
-        new ChangedItem("go2", "#EE7F00", "Hue Go 2"))));
-  }
-
-  public void add(Event event) {
-    dummyListOfEvents.add(event);
-    dummyMapOfEvents.put(event.getIdentifier(), event);
-  }
-
-  @ApiOperation(value = "Get all events in long form", response = List.class)
-  @GetMapping(value = "", produces = "application/json")
-  public List<Event> getAllEvents() {
-    return dummyListOfEvents;
-  }
-
-  @ApiOperation(value = "Get detailed information of one event", response = Event.class)
-  @GetMapping(value = "/{identifier}", produces = "application/json")
-  public Event getEvent(@PathVariable int identifier) {
-    return dummyMapOfEvents.get(identifier);
-  }
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ManualChangeEvent.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ManualChangeEvent.java
deleted file mode 100644
index 69bf77692f9d1a013d092d8c661c0d1633f55f69..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/ManualChangeEvent.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.ApiModelProperty;
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-
-import java.util.List;
-
-/**
- * Manual change of items in the system by a user.
- *
- * @author rschoene - Initial contribution
- */
-@EqualsAndHashCode(callSuper = true)
-@Data
-public class ManualChangeEvent extends Event {
-  @ApiModelProperty(notes = "The type of the event")
-  public final String type = "manual";
-
-  public ManualChangeEvent(long timestamp, int identifier, List<ChangedItem> changedItems) {
-    super(timestamp, identifier, changedItems);
-  }
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/RecognitionEvent.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/RecognitionEvent.java
deleted file mode 100644
index 24ed98e01585504300cec39273ec06145cbc9350..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/RecognitionEvent.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import io.swagger.annotations.ApiModelProperty;
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-
-import java.util.List;
-
-/**
- * Change of items performed automatically by the system.
- *
- * @author rschoene - Initial contribution
- */
-@EqualsAndHashCode(callSuper = true)
-@Data
-public class RecognitionEvent extends Event {
-  @ApiModelProperty(notes = "The identifier of the activity causing this event")
-  public final int activity;
-  @ApiModelProperty(notes = "The description of the activity")
-  public final String description;
-  @ApiModelProperty(notes = "The type of the event")
-  public final String type = "recognition";
-
-  public RecognitionEvent(long timestamp, int identifier, List<ChangedItem> changedItems, int activity, String description) {
-    super(timestamp, identifier, changedItems);
-    this.activity = activity;
-    this.description = description;
-  }
-
-}
diff --git a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/SwaggerConfig.java b/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/SwaggerConfig.java
deleted file mode 100644
index fff9d9d36e1e966784f289efb91e9a92dcd82c43..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/java/de/tudresden/inf/st/eraser/rest/SwaggerConfig.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package de.tudresden.inf.st.eraser.rest;
-
-import com.google.common.base.Predicates;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import springfox.documentation.builders.ApiInfoBuilder;
-import springfox.documentation.builders.PathSelectors;
-import springfox.documentation.builders.RequestHandlerSelectors;
-import springfox.documentation.service.ApiInfo;
-import springfox.documentation.service.Contact;
-import springfox.documentation.spi.DocumentationType;
-import springfox.documentation.spring.web.plugins.Docket;
-import springfox.documentation.swagger2.annotations.EnableSwagger2;
-
-/**
- * Configuration class to enable Swagger.
- *
- * @author rschoene - Initial contribution
- */
-@Configuration
-@EnableSwagger2
-public class SwaggerConfig {
-  @SuppressWarnings("Guava")
-  @Bean
-  public Docket api() {
-    return new Docket(DocumentationType.SWAGGER_2)
-        .select()
-        .apis(Predicates.not(RequestHandlerSelectors.basePackage("org.springframework")))
-        .paths(PathSelectors.any())
-        .build()
-        .apiInfo(metaData());
-  }
-
-  private ApiInfo metaData() {
-    return new ApiInfoBuilder()
-        .title("OpenLicht Knowledge-Base REST API")
-        .description("\"OpenLicht-REST-Server to get recent recognitions, manual settings and activities\"")
-        .version("1.0.0")
-        .license("MIT License")
-        .licenseUrl("https://opensource.org/licenses/MIT")
-        .contact(new Contact("René Schöne",
-            "http://tu-dresden.de/die_tu_dresden/fakultaeten/fakultaet_informatik/smt/st/mitarbeiter?person=375",
-            "rene.schoene@tu-dresden.de"))
-        .build();
-  }
-}
diff --git a/eraser.rest/src/main/resources/log4j2.xml b/eraser.rest/src/main/resources/log4j2.xml
deleted file mode 100644
index 867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c..0000000000000000000000000000000000000000
--- a/eraser.rest/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration>
-    <Appenders>
-        <Console name="Console">
-            <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
-        </Console>
-        <RollingFile name="RollingFile" fileName="logs/eraser.log"
-                    filePattern="logs/eraser-%i.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
-            <Policies>
-                <OnStartupTriggeringPolicy/>
-            </Policies>
-            <DefaultRolloverStrategy max="20"/>
-        </RollingFile>
-    </Appenders>
-    <Loggers>
-        <Root level="debug">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="RollingFile"/>
-        </Root>
-    </Loggers>
-</Configuration>
diff --git a/influx_test/.gitignore b/influx_test/.gitignore
deleted file mode 100644
index 70b583e34c3316bcd77c807e2d6b85db5e7d49f6..0000000000000000000000000000000000000000
--- a/influx_test/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/build/
-/bin/
-logs/
diff --git a/influx_test/build.gradle b/influx_test/build.gradle
deleted file mode 100644
index 50fdeee4c477c0d43ad4ce73436630531a423a32..0000000000000000000000000000000000000000
--- a/influx_test/build.gradle
+++ /dev/null
@@ -1,22 +0,0 @@
-apply plugin: 'application'
-
-dependencies {
-    compile project(':eraser-base')
-    compile group: 'org.influxdb', name: 'influxdb-java', version: '2.15'
-}
-
-run {
-    mainClassName = 'de.tudresden.inf.st.eraser.influx_test.Main'
-    standardInput = System.in
-    if (project.hasProperty("appArgs")) {
-        args Eval.me(appArgs)
-    }
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/influx_test/src/main/java/de/tudresden/inf/st/eraser/influx_test/Main.java b/influx_test/src/main/java/de/tudresden/inf/st/eraser/influx_test/Main.java
deleted file mode 100644
index ed4bbdfae50f5ad3909c3badfd5bf7f948c7a1ac..0000000000000000000000000000000000000000
--- a/influx_test/src/main/java/de/tudresden/inf/st/eraser/influx_test/Main.java
+++ /dev/null
@@ -1,128 +0,0 @@
-package de.tudresden.inf.st.eraser.influx_test;
-
-import org.influxdb.InfluxDB;
-import org.influxdb.InfluxDBFactory;
-import org.influxdb.annotation.Column;
-import org.influxdb.annotation.Measurement;
-import org.influxdb.dto.Point;
-import org.influxdb.dto.Pong;
-import org.influxdb.dto.Query;
-import org.influxdb.impl.InfluxDBResultMapper;
-
-import java.time.Instant;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-@SuppressWarnings("deprecation")
-public class Main {
-
-  @Measurement(name = "ItemD")
-  public static class ItemWithDoubleStatePoint {
-    @Column(name = "time")
-    private Instant time;
-
-    @Column(name = "state")
-    private Double state;
-
-    @Column(name = "id", tag = true)
-    private String id;
-
-    static ItemWithDoubleStatePoint of(double state, String id) {
-      return of(Instant.now(), state, id);
-    }
-
-    static ItemWithDoubleStatePoint of(Instant time, double state, String id) {
-      ItemWithDoubleStatePoint point = new ItemWithDoubleStatePoint();
-      point.time = time;
-      point.state = state;
-      point.id = id;
-      return point;
-    }
-
-    Point build() {
-      return Point.measurement("ItemD")
-          .time(time.toEpochMilli(), TimeUnit.MILLISECONDS)
-          .addField("state", state)
-          .tag("id", id)
-          .build();
-    }
-
-    @Override
-    public String toString() {
-      return "ItemD [" + id + "@" + time + ": " + state + "]";
-    }
-  }
-
-  // InfluxDB connections settings
-  private static final String host = "172.22.1.152";
-  private static final int port = 8086;
-  private static final String user = "root";
-  private static final String password = "root";
-  private static final String dbName = "jastaddHistoryMain";
-
-  public static void main(String[] args) {
-    testInflux();
-  }
-
-  private static void testInflux() {
-    // see https://github.com/influxdata/influxdb-java
-    String url = String.format("http://%s:%s", host, port);
-    InfluxDB influxDB = InfluxDBFactory.connect(url, user, password);
-    Pong response = influxDB.ping();
-    if (response.getVersion().equalsIgnoreCase("unknown")) {
-      System.err.println("Error pinging server");
-      return;
-    }
-
-    if (databaseExists(influxDB)) {
-      deleteDatabase(influxDB);
-    }
-    createDatabase(influxDB);
-    influxDB.setDatabase(dbName);
-    createDefaultRetentionPolicy(influxDB);
-    InfluxDBResultMapper resultMapper = new InfluxDBResultMapper();
-    ItemWithDoubleStatePoint point;
-    Query q;
-    List<ItemWithDoubleStatePoint> result;
-
-    // add one measurement
-    point = ItemWithDoubleStatePoint.of(0.3, "iris1_item");
-    influxDB.write(point.build());
-
-    // read all measurements
-    q = new Query("SELECT id, state FROM ItemD WHERE id = 'iris1_item'", dbName);
-    result = resultMapper.toPOJO(influxDB.query(q), ItemWithDoubleStatePoint.class);
-    System.out.println(result);
-
-    // add another measurement
-    point = ItemWithDoubleStatePoint.of(0.4, "iris1_item");
-    influxDB.write(point.build());
-
-    // and read all measurements, should be two now
-    q = new Query("SELECT id, state FROM ItemD WHERE id = 'iris1_item'", dbName);
-    result = resultMapper.toPOJO(influxDB.query(q), ItemWithDoubleStatePoint.class);
-    System.out.println(result);
-  }
-
-  private static boolean databaseExists(InfluxDB influxDB) {
-//    Query query = new Query("SHOW DATABASES", dbName);
-    return influxDB.databaseExists(dbName);
-  }
-
-  private static void deleteDatabase(InfluxDB influxDB) {
-    influxDB.deleteDatabase(dbName);
-  }
-
-  private static void createDatabase(InfluxDB influxDB) {
-    influxDB.createDatabase(dbName);
-//    influxDB.query(Query.encode("CREATE DATABASE \"" + dbName + "\""));
-  }
-
-  private static void createDefaultRetentionPolicy(InfluxDB influxDB) {
-    String rpName = "aRetentionPolicy";
-    influxDB.createRetentionPolicy(rpName, dbName, "30d", "30m", 2, true);
-    influxDB.setRetentionPolicy(rpName);
-  }
-}
diff --git a/influx_test/src/main/resources/log4j2.xml b/influx_test/src/main/resources/log4j2.xml
deleted file mode 100644
index 867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c..0000000000000000000000000000000000000000
--- a/influx_test/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration>
-    <Appenders>
-        <Console name="Console">
-            <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
-        </Console>
-        <RollingFile name="RollingFile" fileName="logs/eraser.log"
-                    filePattern="logs/eraser-%i.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
-            <Policies>
-                <OnStartupTriggeringPolicy/>
-            </Policies>
-            <DefaultRolloverStrategy max="20"/>
-        </RollingFile>
-    </Appenders>
-    <Loggers>
-        <Root level="debug">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="RollingFile"/>
-        </Root>
-    </Loggers>
-</Configuration>
diff --git a/learner_test/.gitignore b/learner_test/.gitignore
deleted file mode 100644
index 84c048a73cc2e5dd24f807669eb99b0ce3123195..0000000000000000000000000000000000000000
--- a/learner_test/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/build/
diff --git a/learner_test/build.gradle b/learner_test/build.gradle
deleted file mode 100644
index a30850aa0ba078ba9858e356cadbb812004d0983..0000000000000000000000000000000000000000
--- a/learner_test/build.gradle
+++ /dev/null
@@ -1,30 +0,0 @@
-repositories {
-    mavenCentral()
-}
-
-sourceCompatibility = 1.8
-
-apply plugin: 'java'
-apply plugin: 'application'
-
-dependencies {
-    compile project(':eraser-base')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: "${jackson_version}"
-    compile 'org.encog:encog-core:3.4'
-}
-
-run {
-    mainClassName = 'de.tudresden.inf.st.eraser.learner_test.Main'
-    standardInput = System.in
-    if (project.hasProperty("appArgs")) {
-        args Eval.me(appArgs)
-    }
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/Main.java b/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/Main.java
deleted file mode 100644
index 62f00cb624f4dbae09c84bca772d09318f5c9964..0000000000000000000000000000000000000000
--- a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/Main.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package de.tudresden.inf.st.eraser.learner_test;
-import org.encog.Encog;
-import org.encog.ml.MLClassification;
-import org.encog.ml.data.MLData;
-import org.encog.persist.EncogDirectoryPersistence;
-import org.encog.util.csv.CSVFormat;
-import org.encog.util.csv.ReadCSV;
-import org.encog.util.simple.EncogUtility;
-
-import org.encog.ml.data.versatile.NormalizationHelper;
-import org.encog.ml.data.versatile.VersatileMLDataSet;
-import org.encog.ml.data.versatile.columns.ColumnDefinition;
-import org.encog.ml.data.versatile.columns.ColumnType;
-import org.encog.ml.data.versatile.sources.VersatileDataSource;
-import org.encog.ml.data.versatile.sources.CSVDataSource;
-import org.encog.ml.factory.MLMethodFactory;
-import org.encog.ml.model.EncogModel;
-import org.encog.ConsoleStatusReportable;
-import org.encog.ml.MLRegression;
-import java.io.File;
-import java.util.Arrays;
-import static org.encog.persist.EncogDirectoryPersistence.*;
-
-public class Main {
-
-  public static void main(String[] args) {
-    // map the CSV data into the model
-    String savefile = "src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model.eg";
-    String csvFile = "src/main/java/de/tudresden/inf/st/eraser/learner_test/preference_data.csv";
-    File file = new File(csvFile);
-    VersatileDataSource source = new CSVDataSource(file, false, CSVFormat.DECIMAL_POINT);
-    VersatileMLDataSet data = new VersatileMLDataSet(source);
-    data.defineSourceColumn("monat", 0, ColumnType.continuous);
-    data.defineSourceColumn("day", 1, ColumnType.continuous);
-    data.defineSourceColumn("hour", 2, ColumnType.continuous);
-    data.defineSourceColumn("minute", 3, ColumnType.continuous);
-    ColumnDefinition outputColumn = data.defineSourceColumn("labels", 4, ColumnType.continuous);
-    data.defineSingleOutputOthersInput(outputColumn);
-    data.analyze();
-    System.out.println("get data ");
-    EncogModel model = new EncogModel(data);
-    model.selectMethod(data, MLMethodFactory.TYPE_FEEDFORWARD);
-    //model.setReport(new ConsoleStatusReportable());
-    data.normalize();
-    NormalizationHelper helper = data.getNormHelper();
-    System.out.println(helper.toString());
-    model.holdBackValidation(0.3, true, 1001);
-    model.selectTrainingType(data);
-    // note: cross-validation is run twice, once for a regression view and once for a classification view of the best method
-    MLRegression bestMethod = (MLRegression) model.crossvalidate(5, true);
-    MLClassification bestMethodtest = (MLClassification) model.crossvalidate(5, true);
-    /**System.out.println( "Training error: " + EncogUtility.calculateRegressionError(bestMethod, model.getTrainingDataset()));
-    System.out.println( "testTraining error: " + EncogUtility.calculateClassificationError(bestMethodtest, model.getTrainingDataset()));
-    System.out.println( "Validation error: " + EncogUtility.calculateRegressionError(bestMethod, model.getValidationDataset()));
-    System.out.println( "testValidation error: " + EncogUtility.calculateClassificationError(bestMethodtest, model.getValidationDataset()));
-
-    System.out.println(helper.getClass());
-    System.out.println(helper.toString());
-    System.out.println("Final model: " + bestMethod);
-    System.out.println("Final testmodel: " + bestMethodtest);**/
-    //NormalizationHelper helper = data.getNormHelper();
-
-    // test: split the normalization helper description into its parts (finalStr is currently unused)
-    String helperStr = helper.toString();
-    String[] split = helperStr.split(";");
-    String[] finalStr = split[split.length - 1].replace("]", "").replace("[", "").split(",");
-    System.out.println(helper);
-
-    // save network...
-    //to delete
-    saveObject(new File(savefile), bestMethodtest);
-    ReadCSV csv = new ReadCSV(csvFile, false, CSVFormat.DECIMAL_POINT);
-    String[] line = new String[4];
-    MLData input = helper.allocateInputVector();
-    System.out.println("input test---------------");
-    System.out.println(input);
-    while(csv.next()) {
-      StringBuilder result = new StringBuilder();
-      line[0] = csv.get(0);
-      line[1] = csv.get(1);
-      line[2] = csv.get(2);
-      line[3] = csv.get(3);
-      String correct = csv.get(4);
-      helper.normalizeInputVector(line,input.getData(),false);
-
-      MLData output = bestMethod.compute(input);
-      System.out.println("inputs:");
-      System.out.println(input);
-      System.out.println("outputs:");
-      System.out.println(output);
-      String brightnessChosen = helper.denormalizeOutputVectorToString(output)[0];
-
-      result.append(Arrays.toString(line));
-      result.append(" -> predicted: ");
-      result.append(brightnessChosen);
-      result.append("(correct: ");
-      result.append(correct);
-      result.append(")");
-      System.out.println(result.toString());
-      break;
-
-    }
-    // Delete the data file and shut down.
-    // file.delete();
-    Encog.getInstance().shutdown();
-    /**Training error: 0.299928703107046
-    testTraining error: 0.9931740614334471
-    Validation error: 0.41277024952020763
-    testValidation error: 0.992*/
-
-  }
-}
diff --git a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/final_data.csv b/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/final_data.csv
deleted file mode 100644
index a1e263194d3ee4f95c64c3eff3b5123fb2246cf5..0000000000000000000000000000000000000000
--- a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/final_data.csv
+++ /dev/null
@@ -1,418 +0,0 @@
-7,20,12,13,2
-7,20,14,40,1
-7,20,14,40,2
-7,21,13,2,2
-7,21,13,2,2
-7,21,14,23,2
-7,21,14,23,2
-7,21,15,41,2
-7,21,16,54,2
-7,21,16,54,2
-7,21,17,45,3
-7,22,12,28,3
-7,22,15,35,2
-7,22,15,35,2
-7,22,18,59,3
-7,22,18,59,3
-7,23,12,32,2
-7,23,12,32,2
-7,23,16,7,2
-7,23,16,7,2
-7,23,16,7,2
-7,23,16,7,2
-7,23,16,7,2
-7,24,12,4,0
-7,24,12,4,0
-7,24,12,4,1
-7,24,14,38,2
-7,24,14,38,2
-7,24,18,54,3
-7,25,12,31,0
-7,25,12,32,1
-7,25,12,32,1
-7,25,15,6,3
-7,25,18,56,3
-7,26,13,41,2
-7,26,19,14,3
-7,27,11,39,2
-7,27,11,39,3
-7,27,11,46,3
-7,27,11,46,2
-7,27,13,8,2
-7,27,13,8,2
-7,27,13,9,2
-7,27,13,45,2
-7,27,13,45,2
-7,27,15,38,3
-7,28,12,12,2
-7,28,12,13,2
-7,28,12,41,2
-7,28,12,41,2
-7,28,12,41,2
-7,28,14,0,1
-7,28,14,0,2
-7,28,15,21,3
-7,28,18,56,3
-7,29,10,9,1
-7,29,10,9,1
-7,29,10,9,1
-7,29,11,54,0
-7,29,11,54,0
-7,29,11,54,0
-7,29,11,54,1
-7,29,14,10,2
-7,29,16,44,2
-7,29,16,44,2
-7,30,16,7,3
-7,30,18,45,3
-7,31,13,2,0
-7,31,13,2,1
-7,31,13,3,1
-7,31,13,3,1
-7,31,13,3,1
-7,31,18,39,3
-8,1,12,22,0
-8,1,12,22,1
-8,1,14,20,2
-8,1,14,20,2
-8,1,14,20,2
-8,1,15,55,3
-8,1,18,31,3
-8,1,18,37,3
-8,1,18,37,3
-8,1,19,2,3
-8,1,19,2,3
-8,1,20,5,3
-8,2,10,9,2
-8,2,10,9,1
-8,2,10,9,2
-8,2,10,9,2
-8,2,13,58,2
-8,2,13,58,2
-8,2,15,44,3
-8,2,15,44,3
-8,2,15,44,3
-8,2,17,21,3
-8,2,17,21,3
-8,2,17,21,3
-8,3,13,31,1
-8,3,13,31,2
-8,3,13,32,2
-8,3,16,43,3
-8,4,13,20,1
-8,4,13,20,2
-8,4,18,27,3
-8,5,13,37,2
-8,5,13,37,2
-8,5,18,33,3
-8,6,11,24,3
-8,6,11,24,3
-8,6,11,24,3
-8,6,13,50,3
-8,7,13,4,2
-8,7,13,4,2
-8,7,14,56,3
-8,8,12,13,2
-8,8,12,13,2
-8,8,15,51,2
-8,8,15,51,2
-8,8,15,51,3
-8,9,13,32,2
-8,9,13,32,2
-8,9,13,32,2
-8,9,15,8,2
-8,9,15,8,2
-8,9,15,8,2
-8,9,16,19,2
-8,10,11,32,0
-8,10,11,32,1
-8,10,11,32,1
-8,10,13,13,1
-8,10,13,13,1
-8,10,13,13,2
-8,10,16,42,3
-8,10,16,42,3
-8,11,14,6,2
-8,11,14,7,2
-8,11,18,54,3
-8,11,18,54,3
-8,11,18,54,3
-8,12,12,27,1
-8,12,12,27,1
-8,12,12,28,1
-8,12,13,53,2
-8,12,13,53,2
-8,12,13,53,2
-8,12,15,21,3
-8,13,13,16,1
-8,13,13,16,1
-8,13,13,16,1
-8,13,14,14,2
-8,13,14,14,2
-8,13,16,11,3
-8,13,17,18,3
-8,14,13,7,1
-8,14,13,7,1
-8,14,13,7,1
-8,14,13,7,1
-8,14,13,7,2
-8,14,13,7,2
-8,14,15,6,3
-8,15,14,5,2
-8,15,14,5,2
-8,15,14,6,2
-8,15,14,6,2
-8,15,16,41,3
-8,15,16,41,3
-8,15,17,30,3
-8,16,13,40,2
-8,16,13,40,2
-8,16,17,52,3
-8,16,17,53,3
-8,17,13,34,1
-8,17,13,35,2
-8,17,14,7,2
-8,17,19,2,3
-8,18,10,21,3
-8,18,11,14,2
-8,18,11,14,2
-8,18,11,14,2
-8,18,11,14,2
-8,18,14,25,2
-8,18,14,25,3
-8,18,14,25,2
-8,18,18,18,3
-8,18,18,19,3
-8,19,18,33,3
-8,19,18,33,3
-8,19,18,33,3
-8,19,18,33,3
-8,20,14,28,2
-8,20,14,28,2
-8,20,14,28,2
-8,20,14,28,2
-8,20,17,8,3
-8,20,18,22,3
-8,21,11,24,1
-8,21,11,24,1
-8,21,11,24,1
-8,21,15,34,3
-8,21,18,55,3
-8,22,12,3,1
-8,22,12,4,2
-8,22,12,4,2
-8,22,13,51,2
-8,22,13,51,2
-8,22,13,51,2
-8,22,18,12,3
-8,22,18,12,3
-8,22,18,12,3
-8,22,18,12,3
-8,22,18,40,3
-8,22,18,40,3
-8,23,13,42,1
-8,23,13,42,1
-8,23,17,32,3
-8,23,19,28,3
-8,23,20,27,3
-8,23,20,27,3
-8,23,21,49,3
-8,24,14,0,2
-8,24,14,0,2
-8,24,14,0,2
-8,24,14,0,2
-8,24,15,4,3
-8,24,15,4,3
-8,24,16,2,3
-8,24,16,3,3
-8,24,16,37,3
-8,24,17,9,3
-8,24,17,14,3
-8,25,13,34,1
-8,25,13,34,1
-8,25,13,34,1
-8,25,13,34,1
-8,25,13,34,1
-8,25,15,1,3
-8,25,17,58,3
-8,26,10,29,0
-8,26,10,29,0
-8,26,10,29,0
-8,26,10,29,0
-8,26,10,29,0
-8,26,16,42,3
-8,26,16,42,3
-8,26,18,41,3
-8,26,18,41,3
-8,27,13,41,2
-8,27,13,41,2
-8,27,13,41,2
-8,27,13,41,2
-8,27,17,42,3
-8,28,11,9,1
-8,28,11,9,1
-8,28,12,14,0
-8,28,12,14,1
-8,28,12,14,0
-8,28,15,3,2
-8,28,15,3,2
-8,28,16,31,3
-8,28,17,40,3
-8,29,14,44,3
-8,29,17,25,3
-8,30,12,5,0
-8,30,12,5,0
-8,30,12,5,0
-8,30,13,32,1
-8,30,13,32,1
-8,30,13,56,2
-8,30,14,23,2
-8,30,14,23,2
-8,30,14,23,2
-8,30,14,23,2
-8,30,14,41,2
-8,30,14,41,2
-8,30,14,41,2
-8,30,15,50,3
-8,30,17,0,3
-8,30,18,59,3
-8,30,18,59,3
-8,31,14,31,2
-8,31,14,31,2
-8,31,14,31,2
-8,31,17,59,3
-8,31,18,0,3
-9,1,16,13,3
-9,1,16,13,3
-9,1,16,13,3
-9,1,17,41,3
-9,2,13,44,1
-9,2,13,44,1
-9,2,13,44,1
-9,2,14,49,2
-9,2,14,49,2
-9,2,14,49,2
-9,2,16,6,3
-9,2,16,6,3
-9,2,17,2,3
-9,3,16,9,3
-9,3,17,35,3
-9,3,17,36,3
-9,4,12,57,1
-9,4,12,57,1
-9,4,15,8,3
-9,4,15,34,3
-9,4,16,26,3
-9,4,16,26,3
-9,4,18,37,3
-9,4,18,37,3
-9,4,18,37,3
-9,6,11,18,0
-9,6,11,18,0
-9,6,12,54,1
-9,6,12,54,1
-9,6,14,21,2
-9,6,14,21,2
-9,6,19,20,3
-9,7,11,50,0
-9,7,14,17,2
-9,7,14,57,3
-9,7,14,57,3
-9,7,16,56,3
-9,7,16,56,3
-9,7,16,56,3
-9,7,16,56,3
-9,7,18,38,3
-9,7,18,38,3
-9,8,11,4,2
-9,8,11,4,2
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,13,0
-9,8,11,14,0
-9,8,11,14,1
-9,8,11,14,1
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,0
-9,8,12,1,1
-9,8,12,36,0
-9,8,12,36,0
-9,8,12,36,0
-9,8,12,36,0
-9,8,12,36,0
-9,8,13,37,1
-9,8,13,37,1
-9,8,13,37,1
-9,8,14,20,2
-9,8,14,20,2
-9,8,18,20,3
-9,9,12,47,1
-9,9,12,47,2
-9,9,12,47,2
-9,9,19,5,3
-9,10,13,15,1
-9,10,13,15,1
-9,10,13,15,0
-9,10,16,49,3
-9,10,19,6,3
-9,10,21,5,3
-9,11,14,16,2
-9,11,14,16,2
-9,11,14,16,2
-9,11,18,41,3
-9,12,14,43,2
-9,12,14,43,2
-9,12,14,43,2
-9,12,16,14,3
-9,12,17,12,3
-9,12,17,12,2
-9,12,17,12,3
-9,12,17,12,2
-9,12,20,44,3
-9,13,19,52,3
-9,14,14,39,2
-9,14,14,39,2
-9,14,15,14,3
-9,14,17,29,3
-9,14,17,29,3
-9,14,17,29,3
-9,15,11,41,1
-9,15,11,41,1
-9,15,13,4,1
-9,15,14,3,1
-9,15,14,3,2
-9,16,12,36,1
-9,16,12,36,1
-9,16,12,36,1
-9,16,12,36,1
-9,16,12,48,1
-9,16,12,48,1
-9,16,13,51,1
-9,16,13,51,2
-9,16,13,51,1
-9,16,15,13,3
-9,16,15,14,3
-9,16,15,14,3
-9,17,10,27,0
-9,17,10,27,0
-9,17,11,10,0
-9,17,11,10,0
-9,17,11,10,0
-9,17,12,43,1
-9,17,12,43,1
-9,17,12,43,1
-9,17,13,32,1
-9,17,13,32,1
-9,17,14,5,1
-9,17,14,5,2
-9,17,14,6,2
-9,17,15,7,3
-9,17,15,49,3
-9,17,15,49,3
-9,17,18,12,3
-9,17,18,13,3
diff --git a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model.eg b/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model.eg
deleted file mode 100644
index 47c37ff28ef95b7b22a2df05ec050c3ffeb53a30..0000000000000000000000000000000000000000
--- a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model.eg
+++ /dev/null
@@ -1,24 +0,0 @@
-encog,BasicNetwork,java,3.4.0,1,1554196571101
-[BASIC]
-[BASIC:PARAMS]
-[BASIC:NETWORK]
-beginTraining=0
-connectionLimit=0
-contextTargetOffset=0,0,0
-contextTargetSize=0,0,0
-endTraining=2
-hasContext=f
-inputCount=4
-layerCounts=1,8,5
-layerFeedCounts=1,7,4
-layerContextCount=0,0,0
-layerIndex=0,1,9
-output=0.2537517424,0.3154675575,-0.8739039638,-0.4408848221,-0.8484433638,-0.999915299,-0.6964984771,-0.208278439,1,0,0,-0.4545454545,0.3559322034,1
-outputCount=1
-weightIndex=0,8,43
-weights=0.5976774048,-0.7925906525,0.7127327881,-0.9611660362,0.8031350986,-0.7286657218,1.0990482817,-0.5985785536,-0.0783115433,0.575612931,1.1267500918,1.7184744034,0.2271044512,-1.0525796764,0.0900869671,1.1492323512,0.6141715555,-1.0455927965,-0.0925453451,0.2471651431,2.3634316872,0.3939369257,0.4607437082,-0.1435186798,0.8428535365,-0.0848896791,-0.070602589,-1.2640263565,2.4899996734,-0.2185394776,10.3421332361,-0.1650898311,-0.2750133571,-0.79680959,-0.8051139953,0.8219933747,-0.0727160299,-0.4609522002,-1.0410685492,-0.5354063412,0.3028724456,-0.6835374219,0.169591233
-biasActivation=0,1,1
-[BASIC:ACTIVATION]
-"org.encog.engine.network.activation.ActivationTANH"
-"org.encog.engine.network.activation.ActivationTANH"
-"org.encog.engine.network.activation.ActivationLinear"
diff --git a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model_test.eg b/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model_test.eg
deleted file mode 100644
index 62fe6421d95e164aa14e123b950e831e37a5f23c..0000000000000000000000000000000000000000
--- a/learner_test/src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model_test.eg
+++ /dev/null
@@ -1,24 +0,0 @@
-encog,BasicNetwork,java,3.4.0,1,1548158734516
-[BASIC]
-[BASIC:PARAMS]
-[BASIC:NETWORK]
-beginTraining=0
-connectionLimit=0
-contextTargetOffset=0,0,0
-contextTargetSize=0,0,0
-endTraining=2
-hasContext=f
-inputCount=4
-layerCounts=4,8,5
-layerFeedCounts=4,7,4
-layerContextCount=0,0,0
-layerIndex=0,4,12
-output=0.6991387348,-0.8711034513,-0.996886038,-0.832747291,-0.0935682806,-0.9996163977,0.5399150265,0.9411173394,-0.5084989975,0.4850010791,0.9999999957,1,0,-0.6666666667,-0.4545454545,0.6949152542,1
-outputCount=4
-weightIndex=0,32,67
-weights=-2.6901880743,0.6512821123,-1.2270002115,1.63124668,0.1982387305,-0.2994789552,1.5833040739,-0.9450411677,2.0541422847,-0.718279397,-1.1761952241,0.5028631512,0.0690323612,-1.496141565,-0.1955149568,-0.7453976822,-0.3691141073,0.9854755554,2.2113850088,-1.5216550292,0.9652087936,-1.3028209693,-1.3346156171,0.4142247818,1.0821207364,0.1987534858,0.6202881884,-0.2940331887,-1.4643282498,2.6960334656,-0.0167663298,-2.9907087565,0.3469960227,-0.0441249736,-2.5998575813,-0.7106361301,-0.8111809962,2.2216158678,-0.5482762437,-1.7996398291,-3.6734127565,-2.9102547958,0.4845401914,0.3760471288,-0.0124987546,0.3784047483,0.5860932613,-0.2682876707,0.7429004186,-7.559247176,-3.4421363532,1.1989747484,-2.3340717496,-1.4740773042,-0.7795788072,-1.8241693655,-0.630132295,-0.8191869009,-0.4060569987,-1.0997423162,-0.5495165849,0.1407829068,-2.2964930412,0.0798893221,-19.5271913755,2.0474187009,-0.2622671892
-biasActivation=0,1,1
-[BASIC:ACTIVATION]
-"org.encog.engine.network.activation.ActivationTANH"
-"org.encog.engine.network.activation.ActivationTANH"
-"org.encog.engine.network.activation.ActivationLinear"
diff --git a/learner_test/src/main/resources/log4j2.xml b/learner_test/src/main/resources/log4j2.xml
deleted file mode 100644
index 0594576fac98ba859e411597c90c8e3d989378bd..0000000000000000000000000000000000000000
--- a/learner_test/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration>
-    <Appenders>
-        <Console name="Console">
-            <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
-        </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
-            <Policies>
-                <OnStartupTriggeringPolicy/>
-            </Policies>
-            <DefaultRolloverStrategy max="20"/>
-        </RollingFile>
-    </Appenders>
-    <Loggers>
-        <Root level="debug">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="RollingFile"/>
-        </Root>
-    </Loggers>
-</Configuration>
diff --git a/ml_test/.gitignore b/ml_test/.gitignore
deleted file mode 100644
index 70b583e34c3316bcd77c807e2d6b85db5e7d49f6..0000000000000000000000000000000000000000
--- a/ml_test/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/build/
-/bin/
-logs/
diff --git a/ml_test/build.gradle b/ml_test/build.gradle
deleted file mode 100644
index a9e198eb94876643269b680a0de267e69724100f..0000000000000000000000000000000000000000
--- a/ml_test/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-apply plugin: 'application'
-
-dependencies {
-    compile project(':eraser-base')
-}
-
-run {
-    mainClassName = 'de.tudresden.inf.st.eraser.ml_test.Main'
-    standardInput = System.in
-    if (project.hasProperty("appArgs")) {
-        args Eval.me(appArgs)
-    }
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java b/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java
deleted file mode 100644
index 535321b63883c8f37cb122b787a6ba3cb70b0fb7..0000000000000000000000000000000000000000
--- a/ml_test/src/main/java/de/tudresden/inf/st/eraser/ml_test/Main.java
+++ /dev/null
@@ -1,195 +0,0 @@
-package de.tudresden.inf.st.eraser.ml_test;
-
-import de.tudresden.inf.st.eraser.jastadd.model.*;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
-public class Main {
-
-  private static final Logger logger = LogManager.getLogger(Main.class);
-
-  public static void main(String[] args) {
-    logger.info("Hello World!");
-    createAndTestBrightnessNetwork();
-    createAndTestColorNetwork();
-  }
-
-  private static Root createModel() {
-    Root root = Root.createEmptyRoot();
-    Group group = new Group();
-    group.setID("Group1");
-    root.getSmartHomeEntityModel().addGroup(group);
-    Item activityItem = newItem("activity", "Recognized activity", false, 8);
-    Item brightnessItem = newItem("brightness", "Measured brightness", false, 5);
-    group.addItem(activityItem);
-    group.addItem(brightnessItem);
-    return root;
-  }
-
-  private static NumberItem newItem(String id, String label, boolean defaultSendState, int initialState) {
-    NumberItem item = new NumberItem();
-    item.setID(id);
-    item.setLabel(label);
-    if (defaultSendState) {
-      item.enableSendState();
-    } else {
-      item.disableSendState();
-    }
-    item.setState(initialState);
-    return item;
-  }
-
-  private static final int REPETITIONS = 20;
-  private static final boolean CHANGE_WEIGHTS_IN_BETWEEN = true;
-  private static final Random random = new Random(0);
-  private static void classifyTimed(
-      NeuralNetworkRoot nn,
-      Function<NeuralNetworkRoot, DoubleNumber> classify,
-      Function<DoubleNumber, String> leafToString) {
-    List<String> results = new ArrayList<>();
-    List<Long> times = new ArrayList<>();
-    for (int i = 0; i < REPETITIONS; i++) {
-      long before = System.nanoTime();
-      DoubleNumber classification = classify.apply(nn);
-      long diff = System.nanoTime() - before;
-      results.add(leafToString.apply(classification));
-      times.add(TimeUnit.NANOSECONDS.toMillis(diff));
-      if (CHANGE_WEIGHTS_IN_BETWEEN) {
-        HiddenNeuron hiddenNeuron = nn.getHiddenNeuron(random.nextInt(nn.getNumHiddenNeuron()));
-        NeuronConnection connection = hiddenNeuron.getOutput(random.nextInt(hiddenNeuron.getNumOutput()));
-        connection.setWeight(hiddenNeuron.getNumOutput() * random.nextDouble());
-        nn.flushTreeCache();
-      }
-    }
-    logger.info("Classification results: {}", results);
-    logger.info("Took {}ms", times.stream().map(l -> Long.toString(l)).collect(Collectors.joining("ms, ")));
-    logger.info("Took on average: {}ms",
-        Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).average().orElse(-1));
-    logger.info("Took on median: {}ms",
-        Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).sorted()
-            .skip((REPETITIONS-1)/2).limit(2-REPETITIONS%2).average().orElse(Double.NaN));
-  }
-
-  private static class PreparationResult {
-    OutputLayer outputLayer;
-    DoubleArrayDoubleFunction sigmoid;
-    InputNeuron activity;
-    InputNeuron brightness;
-    NeuralNetworkRoot nn;
-    HiddenNeuron[] hiddenNeurons;
-
-    PreparationResult(OutputLayer outputLayer, DoubleArrayDoubleFunction sigmoid, InputNeuron activity,
-                      InputNeuron brightness, NeuralNetworkRoot nn, HiddenNeuron[] hiddenNeurons) {
-      this.outputLayer = outputLayer;
-      this.sigmoid = sigmoid;
-      this.activity = activity;
-      this.brightness = brightness;
-      this.nn = nn;
-      this.hiddenNeurons = hiddenNeurons;
-    }
-  }
-
-  private static PreparationResult prepareNetwork() {
-    Root root = createModel();
-    Item activityItem = root.getSmartHomeEntityModel().resolveItem("activity").orElseThrow(
-        () -> new RuntimeException("Activity not found"));
-    Item brightnessItem = root.getSmartHomeEntityModel().resolveItem("brightness").orElseThrow(
-        () -> new RuntimeException("Brightness not found"));
-    NeuralNetworkRoot nn = new NeuralNetworkRoot();
-
-    // note: Math.signum is used here as a cheap stand-in for a sigmoid activation
-    DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
-
-    // input layer (2 neurons)
-    InputNeuron activity = new InputNeuron();
-    activity.setItem(activityItem);
-    InputNeuron brightness = new InputNeuron();
-    brightness.setItem(brightnessItem);
-    nn.addInputNeuron(activity);
-    nn.addInputNeuron(brightness);
-
-    OutputLayer outputLayer = new OutputLayer();
-    nn.setOutputLayer(outputLayer);
-
-    // hidden layer (10 neurons)
-    HiddenNeuron[] hiddenNeurons = new HiddenNeuron[10];
-    for (int hiddenIndex = 0; hiddenIndex < hiddenNeurons.length; hiddenIndex++) {
-      HiddenNeuron hiddenNeuron = new HiddenNeuron();
-      hiddenNeuron.setActivationFormula(sigmoid);
-      nn.addHiddenNeuron(hiddenNeuron);
-      activity.connectTo(hiddenNeuron, 1.0/2.0);
-      brightness.connectTo(hiddenNeuron, 1.0/2.0);
-    }
-    root.getMachineLearningRoot().setPreferenceLearning(nn);
-
-    return new PreparationResult(outputLayer, sigmoid, activity, brightness, nn, hiddenNeurons);
-  }
-
-  /**
-   * Purpose: Create a neural network with 3 layers (2 + 10 + 1 neurons)
-   * Sigmoid function for all layers, combinator of output is identity function
-   */
-  private static void createAndTestBrightnessNetwork() {
-    /*
-      - brightness NN:
-      - currently works with zones, not with individual lamps
-      - 3 layers
-      - input layer has neurons (activity number, value of the brightness sensor)
-      - hidden layer has 10 neurons
-      - output layer has 1 neuron (brightness value)
-      - activation function: sigmoid <- the same for all layers
-     */
-    PreparationResult pr = prepareNetwork();
-    OutputNeuron output = new OutputNeuron();
-    output.setLabel("Brightness_Output");
-    output.setActivationFormula(pr.sigmoid);
-    pr.outputLayer.addOutputNeuron(output);
-    // we just have one output neuron, thus use IdentityFunction
-    pr.outputLayer.setCombinator(inputs -> inputs[0]);
-
-    for (HiddenNeuron hiddenNeuron : pr.hiddenNeurons) {
-      hiddenNeuron.connectTo(output, 1.0/pr.hiddenNeurons.length);
-    }
-
-    classifyTimed(pr.nn, NeuralNetworkRoot::internalClassify,
-        classification -> Double.toString(classification.number));
-  }
-
-  /**
-   * Purpose: Create a neural network with 3 layers (2 + 10 + 3 neurons)
-   * Sum activation for the output neurons, combinator creates an RGB value in hex form
-   */
-  private static void createAndTestColorNetwork() {
-    PreparationResult pr = prepareNetwork();
-    for (int i = 0; i < 3; i++) {
-      OutputNeuron output = new OutputNeuron();
-      output.setLabel("Brightness_Output_" + i);
-      output.setActivationFormula(inputs -> Arrays.stream(inputs).sum());
-      pr.outputLayer.addOutputNeuron(output);
-    }
-    // we have three output neurons, combine them to a double value (representing RGB)
-    pr.outputLayer.setCombinator(inputs -> 65536 * Math.ceil(255.0 * inputs[0]) + 256 * Math.ceil(255.0 * inputs[1]) + Math.ceil(255.0 * inputs[2]));
-
-    for (HiddenNeuron hiddenNeuron : pr.hiddenNeurons) {
-      for (int outputIndex = 0; outputIndex < pr.outputLayer.getNumOutputNeuron(); outputIndex++) {
-        hiddenNeuron.connectTo(pr.outputLayer.getOutputNeuron(outputIndex), random.nextDouble() * 1.0/pr.hiddenNeurons.length);
-      }
-    }
-
-    classifyTimed(pr.nn, NeuralNetworkRoot::internalClassify,
-        classification -> Double.toHexString(classification.number));
-
-//    long before = System.nanoTime();
-//    DoubleNumber classification = nn.classify();
-//    long diff = System.nanoTime() - before;
-//    logger.info("Classification: {}", );
-//    logger.debug("Took {}ms", TimeUnit.NANOSECONDS.toMillis(diff));
-  }
-}
diff --git a/ml_test/src/main/resources/log4j2.xml b/ml_test/src/main/resources/log4j2.xml
deleted file mode 100644
index 686c2a889038bd7e7d89928939edfd09a5f15a94..0000000000000000000000000000000000000000
--- a/ml_test/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration>
-    <Appenders>
-        <Console name="Console">
-            <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
-        </Console>
-        <RollingFile name="RollingFile" fileName="logs/eraser.log"
-                    filePattern="logs/eraser-%i.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
-            <Policies>
-                <OnStartupTriggeringPolicy/>
-            </Policies>
-            <DefaultRolloverStrategy max="20"/>
-        </RollingFile>
-    </Appenders>
-    <Loggers>
-        <Root level="info">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="RollingFile"/>
-        </Root>
-    </Loggers>
-</Configuration>
diff --git a/ml_test_boqi/.gitignore b/ml_test_boqi/.gitignore
deleted file mode 100644
index 70b583e34c3316bcd77c807e2d6b85db5e7d49f6..0000000000000000000000000000000000000000
--- a/ml_test_boqi/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/build/
-/bin/
-logs/
diff --git a/ml_test_boqi/build.gradle b/ml_test_boqi/build.gradle
deleted file mode 100644
index 37d78c6cf3eb148a79182bfaa806be4dcf994612..0000000000000000000000000000000000000000
--- a/ml_test_boqi/build.gradle
+++ /dev/null
@@ -1,29 +0,0 @@
-repositories {
-    mavenCentral()
-}
-
-sourceCompatibility = 1.8
-
-apply plugin: 'java'
-apply plugin: 'application'
-
-dependencies {
-    compile project(':eraser-base')
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: "${jackson_version}"
-}
-
-run {
-    mainClassName = 'de.tudresden.inf.st.eraser.ml_test_boqi.Main'
-    standardInput = System.in
-    if (project.hasProperty("appArgs")) {
-        args Eval.me(appArgs)
-    }
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/ml_test_boqi/src/main/java/de/tudresden/inf/st/eraser/ml_test_boqi/Main.java b/ml_test_boqi/src/main/java/de/tudresden/inf/st/eraser/ml_test_boqi/Main.java
deleted file mode 100644
index f42663472e3a986c82df86e4bf89d0f9ff706441..0000000000000000000000000000000000000000
--- a/ml_test_boqi/src/main/java/de/tudresden/inf/st/eraser/ml_test_boqi/Main.java
+++ /dev/null
@@ -1,219 +0,0 @@
-package de.tudresden.inf.st.eraser.ml_test_boqi;
-
-import de.tudresden.inf.st.eraser.jastadd.model.*;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-import org.apache.commons.math3.stat.StatUtils;
-
-public class Main {
-
-  private static final Logger logger = LogManager.getLogger(Main.class);
-
-  public static void main(String[] args) {
-
-    logger.info("Hello World!");
-    createAndTestBrightnessNetwork();
-  }
-  private static Root createModel() {
-    Root model = Root.createEmptyRoot();
-    Group group = new Group();
-    group.setID("Group1");
-    model.addGroup(group);
-
-    // input item states must be normalized: 1.0, 0.06666666666666665, 0.4545454545454546, -0.5593220338983051, 1 (bias)
-
-    NumberItem monthItem = new NumberItem();
-    monthItem.setState(-1.0);
-    monthItem.setID("month");
-    monthItem.setLabel("datetime-month");
-
-    NumberItem dayItem = new NumberItem();
-    dayItem.setState(0.2666666666666666);
-    dayItem.setID("day");
-    dayItem.setLabel("datetime-day");
-
-    NumberItem hourItem = new NumberItem();
-    hourItem.setState(-0.6363636363636364);
-    hourItem.setID("hour");
-    hourItem.setLabel("datetime-hour");
-
-    NumberItem minuteItem = new NumberItem();
-    minuteItem.setState(-0.5593220338983051);
-    minuteItem.setID("minute");
-    minuteItem.setLabel("datetime-minute");
-
-    NumberItem biasItem = new NumberItem();
-    biasItem.setState(1);
-    biasItem.setID("bias");
-    biasItem.setLabel("bias");
-
-    group.addItem(monthItem);
-    group.addItem(dayItem);
-    group.addItem(hourItem);
-    group.addItem(minuteItem);
-    group.addItem(biasItem);
-    return model;
-  }
-  private static final int REPETITIONS = 1;
-  private static void classifyTimed(
-          NeuralNetworkRoot nn,
-          Function<NeuralNetworkRoot, DoubleNumber> classify,
-          Function<DoubleNumber, String> leafToString) {
-    List<String> results = new ArrayList<>();
-    List<Long> times = new ArrayList<>();
-    long before = System.nanoTime();
-    DoubleNumber classification = classify.apply(nn);
-    long diff = System.nanoTime() - before;
-    results.add(leafToString.apply(classification));
-    times.add(TimeUnit.NANOSECONDS.toMillis(diff));
-    logger.info("Classification results: {}", results);
-    logger.info("Took {}ms", String.join("ms, ", times.stream().map(l -> Long.toString(l)).collect(Collectors.toList())));
-    logger.info("Took on average: {}ms",
-            Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).average().orElse(-1));
-    logger.info("Took on median: {}ms",
-            Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).sorted()
-                    .skip((REPETITIONS - 1) / 2).limit(2 - REPETITIONS % 2).average().orElse(Double.NaN));
-  }
-
-  /**
-   * Purpose: Create a neural network with 3 layers (5 + 8 + 4 neurons)
-   */
-  private static void createAndTestBrightnessNetwork() {
-    Root model = createModel();
-    Item monthItem = model.resolveItem("month").orElseThrow(
-            () -> new RuntimeException("Month not found"));
-    Item dayItem = model.resolveItem("day").orElseThrow(
-            () -> new RuntimeException("Day not found"));
-    Item hourItem = model.resolveItem("hour").orElseThrow(
-            () -> new RuntimeException("Hour not found"));
-    Item minuteItem = model.resolveItem("minute").orElseThrow(
-            () -> new RuntimeException("Minute not found"));
-    Item biasItem = model.resolveItem("bias").orElseThrow(
-            () -> new RuntimeException("Bias not found"));
-
-    NeuralNetworkRoot nn = new NeuralNetworkRoot();
-
-    DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
-    DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
-    DoubleArrayDoubleFunction function_one = inputs -> function_one();
-
-    // weights produced by the learner module:
-    // indices 0-31 are the hidden->output weights (8 per output neuron),
-    // indices 32-66 are the input->hidden weights (5 per tanh hidden neuron)
-    ArrayList<Double> weights= new ArrayList<Double>(Arrays.asList(
-            -4.8288886204,0.6723236931,2.1451097188,-0.8551053267,-0.7858304445,4.1369566727,-3.3096691918,
-            -0.2190980261,2.6871317298,1.2272772167,-2.5292510941,-1.2860407542,-4.2280191541,1.004752063,
-            0.8345207039,0.0123185817,-0.5921808915,0.0967336988,-0.305892589,0.5572392781,-0.7190098073,
-            -1.6247354373,0.4589248822,-0.0269816271,2.2208040852,-3.6281085698,0.2204999381,4.7263701556,
-            -4.8348948698,0.231141867,8.7120706018,-1.4912707741,0.9482851705,0.1377551973,-6.6525856465,
-            -1.321197315,-2.7369948929,17.664289214,-3.1279212743,-0.8245974167,-1.4251924355,0.8370511414,
-            2.0841638143,-0.210152817,-1.9414132298,-1.7973688846,-2.1977997794,-3.6046836685,-3.3403186721,
-            -6.1556924635,-2.8952903587,-1.0773989561,0.2300429028,-0.2184650371,0.0297181797,0.5709092417,
-            1.3960358442,-3.1577981239,0.0423944625,-17.8143314027,-1.4439317172,-0.5137688896,1.0166045804,
-            0.3059149818,1.0938282764,0.6203368549,0.702449827));
-    // input layer
-    InputNeuron month = new InputNeuron();
-    month.setItem(monthItem);
-    InputNeuron day = new InputNeuron();
-    day.setItem(dayItem);
-    InputNeuron hour = new InputNeuron();
-    hour.setItem(hourItem);
-    InputNeuron minute = new InputNeuron();
-    minute.setItem(minuteItem);
-    InputNeuron bias = new InputNeuron();
-    bias.setItem(biasItem);
-
-    nn.addInputNeuron(month);
-    nn.addInputNeuron(day);
-    nn.addInputNeuron(hour);
-    nn.addInputNeuron(minute);
-    nn.addInputNeuron(bias);
-
-    // output layer
-    OutputLayer outputLayer = new OutputLayer();
-    OutputNeuron output0 = new OutputNeuron();
-    output0.setActivationFormula(tanh);
-    OutputNeuron output1 = new OutputNeuron();
-    output1.setActivationFormula(tanh);
-    OutputNeuron output2 = new OutputNeuron();
-    output2.setActivationFormula(tanh);
-    OutputNeuron output3 = new OutputNeuron();
-    output3.setActivationFormula(tanh);
-
-    outputLayer.addOutputNeuron(output0);
-    outputLayer.addOutputNeuron(output1);
-    outputLayer.addOutputNeuron(output2);
-    outputLayer.addOutputNeuron(output3);
-
-    outputLayer.setCombinator(inputs->predictor(inputs));
-    nn.setOutputLayer(outputLayer);
-
-    // hidden layer
-    HiddenNeuron[] hiddenNeurons = new HiddenNeuron[8];
-    for (int i = 0; i < (hiddenNeurons.length); i++) {
-
-      if (i==7){
-        HiddenNeuron hiddenNeuron = new HiddenNeuron();
-        hiddenNeuron.setActivationFormula(function_one);
-        hiddenNeurons[i] = hiddenNeuron;
-        nn.addHiddenNeuron(hiddenNeuron);
-        bias.connectTo(hiddenNeuron,1.0);
-        hiddenNeuron.connectTo(output0, weights.get(i));
-        hiddenNeuron.connectTo(output1, weights.get(i+8));
-        hiddenNeuron.connectTo(output2, weights.get(i+8*2));
-        hiddenNeuron.connectTo(output3, weights.get(i+8*3));
-      } else {
-        HiddenNeuron hiddenNeuron = new HiddenNeuron();
-        hiddenNeuron.setActivationFormula(tanh);
-        hiddenNeurons[i] = hiddenNeuron;
-        nn.addHiddenNeuron(hiddenNeuron);
-
-        month.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length*4)+i*5));
-        day.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length*4+1)+i*5));
-        hour.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length*4+2)+i*5));
-        minute.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length*4+3)+i*5));
-        bias.connectTo(hiddenNeuron,weights.get((hiddenNeurons.length*4+4)+i*5));
-        hiddenNeuron.connectTo(output0, weights.get(i));
-        hiddenNeuron.connectTo(output1, weights.get(i+8));
-        hiddenNeuron.connectTo(output2, weights.get(i+8*2));
-        hiddenNeuron.connectTo(output3, weights.get(i + 8 * 3));
-      }
-    }
-
-    model.getMachineLearningRoot().setPreferenceLearning(nn);
-    System.out.println(model.prettyPrint());
-
-    classifyTimed(nn, NeuralNetworkRoot::classify,
-            classification -> Double.toString(classification.number));
-  }
-  private static double function_one() {
-    return 1.0;
-  }
-
-  private static double predictor(double[] inputs) {
-    int index = 0;
-    double maxinput = StatUtils.max(inputs);
-    System.out.println(Arrays.toString(inputs));
-    for (int i = 0; i < inputs.length; i++) {
-      if (inputs[i] == maxinput) {
-        index = i;
-      }
-    }
-    // outputs from learner
-    ArrayList<Double> outputs = new ArrayList<Double>(Arrays.asList(2.0, 1.0, 3.0, 0.0));
-    return outputs.get(index);
-  }
-}
-
-//inputs:
-//[BasicMLData:-1.0,0.2666666666666666,-0.6363636363636364,-0.5593220338983051]
-//outputs:
-//[BasicMLData:-0.9151867668336432,-0.1568555041251098,-0.9786996639280675,-0.9436628188408074]
-//[7, 20, 12, 13] -> predicted: 1(correct: 2)
\ No newline at end of file
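The deleted `predictor` combinator above picks the output neuron with the highest activation and maps its index onto a fixed label list. For reference, here is a minimal stand-alone sketch of that argmax-plus-lookup idea; the class and method names are illustrative and not part of the project.

```java
import java.util.Arrays;
import java.util.List;

// Illustrative sketch: pick the strongest output neuron and map its index to a label.
public class ArgmaxCombinator {

  // Labels in the same order as the output neurons (taken from the deleted code).
  private static final List<Double> LABELS = Arrays.asList(2.0, 1.0, 3.0, 0.0);

  static double predict(double[] activations) {
    int best = 0;
    for (int i = 1; i < activations.length; i++) {
      if (activations[i] > activations[best]) {
        best = i; // keep the first maximum on ties
      }
    }
    return LABELS.get(best);
  }

  public static void main(String[] args) {
    // Activations similar to the trailing comment in the deleted file.
    double[] outputs = {-0.915, -0.157, -0.979, -0.944};
    System.out.println(Arrays.toString(outputs) + " -> " + predict(outputs)); // 1.0
  }
}
```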
diff --git a/ml_test_boqi/src/main/resources/log4j2.xml b/ml_test_boqi/src/main/resources/log4j2.xml
deleted file mode 100644
index 0594576fac98ba859e411597c90c8e3d989378bd..0000000000000000000000000000000000000000
--- a/ml_test_boqi/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration>
-    <Appenders>
-        <Console name="Console">
-            <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
-        </Console>
-        <RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
-                    filePattern="logs/jastadd-mquat-%i.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
-            <Policies>
-                <OnStartupTriggeringPolicy/>
-            </Policies>
-            <DefaultRolloverStrategy max="20"/>
-        </RollingFile>
-    </Appenders>
-    <Loggers>
-        <Root level="debug">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="RollingFile"/>
-        </Root>
-    </Loggers>
-</Configuration>
diff --git a/org.openhab.action.machinelearn/.gitignore b/org.openhab.action.machinelearn/.gitignore
deleted file mode 100644
index 64c2056c6f34fbf226e041a575bdd73dff75df1f..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-build/
-/bin/
-logs/
diff --git a/org.openhab.action.machinelearn/META-INF/MANIFEST.MF b/org.openhab.action.machinelearn/META-INF/MANIFEST.MF
deleted file mode 100644
index 81418c31f3f28f76f0ff7cf3c7848a838cd8c2ec..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,23 +0,0 @@
-Manifest-Version: 1.0
-Private-Package: org.openhab.action.machinelearn.internal
-Ignore-Package: org.openhab.action.machinelearn.internal
-Bundle-License: http://www.eclipse.org/legal/epl-v10.html
-Bundle-Name: openHAB Machine Learning Action
-Bundle-SymbolicName: org.openhab.action.machinelearn
-Bundle-Vendor: openHAB.org
-Bundle-Version: 1.11.0.qualifier
-Bundle-Activator: org.openhab.action.machinelearn.internal.MachineLearnActivator
-Bundle-ManifestVersion: 2
-Bundle-Description: This is the Machine Learning action of the open Home Aut
- omation Bus (openHAB)
-Import-Package: org.openhab.core.scriptengine.action,
- org.osgi.framework,
- org.osgi.service.cm,
- org.osgi.service.component,
- org.slf4j
-Bundle-DocURL: http://www.openhab.org
-Bundle-RequiredExecutionEnvironment: JavaSE-1.7
-Service-Component: OSGI-INF/action.xml
-Bundle-ClassPath: .,
- lib/weka.jar
-Bundle-ActivationPolicy: lazy
diff --git a/org.openhab.action.machinelearn/OSGI-INF/action.xml b/org.openhab.action.machinelearn/OSGI-INF/action.xml
deleted file mode 100644
index d3d60bb2711d68bad6c7e223f01f5fe50fe3fd4c..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/OSGI-INF/action.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-
-    Copyright (c) 2010-2016 by the respective copyright holders.
-
-    All rights reserved. This program and the accompanying materials
-    are made available under the terms of the Eclipse Public License v1.0
-    which accompanies this distribution, and is available at
-    http://www.eclipse.org/legal/epl-v10.html
-
--->
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="activate" deactivate="deactivate" immediate="true" name="org.openhab.action.machinelearn.action">
-	<implementation class="org.openhab.action.machinelearn.internal.MachineLearnActionService" />
-
-	<service>
-		<provide interface="org.openhab.core.scriptengine.action.ActionService" />
-		<provide interface="org.osgi.service.cm.ManagedService" />
-	</service>
-
-	<property name="service.pid" type="String" value="org.openhab.machinelearn" />
-</scr:component>
diff --git a/org.openhab.action.machinelearn/README.md b/org.openhab.action.machinelearn/README.md
deleted file mode 100644
index 805bce7b3f4f8ca24bb4c1a560f74d4ba0a8731b..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/README.md
+++ /dev/null
@@ -1,111 +0,0 @@
-# OpenHAB machine learning action plugin
-
-## Installation
-
-Copy the .jar file containing the plugin into openHAB's `addons` directory.
-The action then provides several methods for using ML functionality.
-
-## How does this work?
-
-The plugin maintains a collection of machine learning models, of which only one is
-active at a time. New models are created if incoming _labeled_ instances
-have a set of attributes different from what the current models have, but these
-models remain inactive until a user-triggered re-evaluation is performed. The
-model with the best evaluation score is then made active. Incoming _labeled_
-instances also form a _training window_, which can be used for retraining the
-existing models (useful to counter concept drift).
-
-## A minimal example
-
-```java
-// See the ARFF documentation online for the expected dataset format
-// Internally, this creates one model trained on the dataset
-buildFromDataset('/path/to/dataset.arff')
-
-
-// Assemble an instance that has to be predicted, somehow...
-Map<String, Double> instance = assembleInstance()
-
-
-// Get the prediction (class index if dataset was for classification
-// or the regression value if dataset was for regression).
-double result = classify(instance)
-
-
-// If labeledInstance is labeled, it will be added to the training window
-// of the created model, but the model will not be retrained.
-result = classify(labeledInstance)
-
-
-// Retrain all models (only one, currently) on their training windows.
-retrain()
-
-
-// If such an instance is labeled, a new model with an appropriate feature set
-// will be silently created and trained on the instance. The returned
-// prediction, however, will be made by the currently active model (the one that
-// was built from the dataset) by ignoring new features and setting the values of
-// missing features to some value (zero or mean).
-result = classify(instanceWithMissingFeature_orNewFeature)
-
-
-// Both models now make predictions, but only the result of the currently active
-// model is returned.
-result = classify(someUnlabeledInstance)
-
-
-// The recent performance of all models is compared, and the one with the best
-// score is made active.
-evaluate()
-```
-
-## Available public methods
-
-* `void saveModel(String path); void loadModel(String path)`
-
-Save the set of predictive models produced by this plugin, or load a
-previously saved model set.
-
-* `void buildFromDataset(String path)`
-
-If `path` contains an ARFF-formatted dataset in which the attribute to be predicted
-is named `label`, then the plugin will erase everything, create a model set with a
-single model built from that dataset, and make that model active.
-
-* `double classify(Map<String, Double> row)`
-
-Pass a dictionary with an input instance (names of attributes mapped to their
-values in the instance). All models will make predictions for this instance, but
-only the prediction made by the currently active model will be returned. If no
-model is active, an exception indicating that issue will be thrown.
-
-If an input instance has a set of attributes that none of the trained models
-has, the existing models will still try to predict that instance. If the
-instance lacks attributes that are present in the models, the models will
-replace the resulting missing values with zero. Attributes of the instance
-that are not present in a model are ignored by that model.
-
-If such an instance has a label, though, a new model will be created and
-trained on this single instance. The new model stays inactive until the user
-requests re-evaluation. For such models, every incoming labeled instance is
-added to their training window, and the model is retrained every time the
-window grows, until the training window is full. After that, retraining happens
-only on the user's demand.
-
-Labeled instances are added to the head of the training window of every model;
-if a window is full, the instance at its tail is removed.
-
-* `void evaluate()`
-
-Each model stores its recent prediction history on labeled instances and is thus
-aware of its recent performance. Calling `evaluate` makes the model with the
-best recent history the active one.
-
-* `void retrain()`
-
-Retrain all existing models on their training windows. Reset the recent history
-of all models.
-
-* `void reset()`
-
-Erase the current model set.
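The README above describes `classify` in terms of attribute maps and the special `label` key. Below is a short sketch of how a caller might assemble such maps and invoke the bundle's static methods directly (rather than from a rule); the attribute names and values are made up for illustration only.

```java
import java.util.HashMap;
import java.util.Map;

import org.openhab.action.machinelearn.internal.MachineLearn;

// Illustrative usage of the buildFromDataset/classify/evaluate actions.
// Only the "label" key has special meaning; other attribute names are arbitrary.
public class ClassifyExample {
  public static void main(String[] args) throws Exception {
    MachineLearn.buildFromDataset("/path/to/dataset.arff");

    Map<String, Double> unlabeled = new HashMap<>();
    unlabeled.put("hour", 14.0);
    unlabeled.put("naturalLight", 62.5);
    double prediction = MachineLearn.classify(unlabeled); // prediction by the active model

    Map<String, Double> labeled = new HashMap<>(unlabeled);
    labeled.put("label", 3.0);
    MachineLearn.classify(labeled); // also appended to the training windows

    MachineLearn.evaluate();        // activate the model with the best recent history
    System.out.println("prediction = " + prediction);
  }
}
```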
diff --git a/org.openhab.action.machinelearn/build.gradle b/org.openhab.action.machinelearn/build.gradle
deleted file mode 100644
index 7360d4c8b932a5805e9bd29f21960d3c67b9851e..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/build.gradle
+++ /dev/null
@@ -1,16 +0,0 @@
-dependencies {
-    compile files('lib/weka.jar')
-    compile project(':stub.org.openhab.core.scriptengine.action')
-    compile group: 'org.osgi', name: 'org.osgi.framework', version: '1.9.0'
-    compile group: 'org.osgi', name: 'org.osgi.service.cm', version: '1.6.0'
-    compile group: 'org.osgi', name: 'org.osgi.service.component', version: '1.4.0'
-    compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/org.openhab.action.machinelearn/lib/weka.jar b/org.openhab.action.machinelearn/lib/weka.jar
deleted file mode 100644
index a8d1fdb042d7d495dcb9ce966f427be87ad7b572..0000000000000000000000000000000000000000
Binary files a/org.openhab.action.machinelearn/lib/weka.jar and /dev/null differ
diff --git a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MLUnit.java b/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MLUnit.java
deleted file mode 100644
index 884bd2c8bc4a32269c49cbc3809ba32b372b9e96..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MLUnit.java
+++ /dev/null
@@ -1,133 +0,0 @@
-package org.openhab.action.machinelearn.internal;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import weka.classifiers.Classifier;
-import weka.core.Attribute;
-import weka.core.DenseInstance;
-import weka.core.Instance;
-import weka.core.Instances;
-import weka.core.converters.ConverterUtils.DataSource;
-
-// Machine learning model class; stores the classifier and the data on which it was trained
-class MLUnit implements java.io.Serializable {
-
-    private static final long serialVersionUID = -4328733681884286806L;
-    private static final double alpha = 0.1;
-    private Classifier cls;
-    private Instances data;
-
-    // How much data-history to store
-    private int capacity;
-    // How important is this classifier
-    private double weight;
-    private double history = 1.0;
-
-    protected MLUnit(String dataset, Class<? extends Classifier> clsc) throws Exception {
-        data = DataSource.read(dataset);
-        capacity = data.numInstances();
-        data.setClass(data.attribute("label"));
-        weight = 1.0;
-        cls = clsc.newInstance();
-        build();
-    }
-
-    protected MLUnit(Set<String> attNames, Class<? extends Classifier> clsc, int window) throws Exception {
-
-        // Initialize data entity (see Weka documentation)
-        ArrayList<Attribute> atts = new ArrayList<Attribute>();
-        int classIndex = -1;
-        for (String name : attNames) {
-            if (name.equals("label")) {
-                classIndex = atts.size();
-            }
-            atts.add(new Attribute(name));
-        }
-        // Throw exception if no "label" attribute is found
-        if (classIndex == -1) {
-            throw new Exception("No 'label' attribute found in: " + attNames);
-        }
-        this.data = new Instances("thinkaboutit", atts, 0);
-        this.data.setClassIndex(classIndex);
-
-        // Create classifier
-        this.cls = clsc.newInstance();
-        this.capacity = window;
-        this.weight = 0.0;
-    }
-
-    private void build() throws Exception {
-        // When there is data - retrain (or train)
-        cls.buildClassifier(data);
-    }
-
-    protected Set<String> getAttributeSet() {
-        // Return set of attributes this model contains
-        Set<String> tmp = new HashSet<String>();
-        for (Attribute a : Collections.list(data.enumerateAttributes())) {
-            tmp.add(a.name());
-        }
-        tmp.add("label");
-        return tmp;
-    }
-
-    protected void setWeight(double w) {
-        this.weight = w;
-    }
-
-    protected double getWeight() {
-        return this.weight;
-    }
-
-    protected double getHistory() {
-        return this.history;
-    }
-
-    protected boolean isMatch(Set<String> keys) {
-        // Check if you can use this model for incoming data
-        return keys.equals(getAttributeSet());
-    }
-
-    protected double incoming(Map<String, Double> row) throws Exception {
-
-        // Fill in new data row
-        double[] values = new double[data.numAttributes()];
-
-        for (int i = 0; i < values.length; i++) {
-            Double num = row.get(data.attribute(i).name());
-            values[i] = num == null ? 0 : num.doubleValue();
-        }
-
-        Instance inst = new DenseInstance(1.0, values);
-        inst.setDataset(data);
-        double clsResult = cls.classifyInstance(inst);
-
-        // If data was labeled add it to the model
-        if (row.get("label") != null) {
-
-            data.add(inst);
-            double[] distro = cls.distributionForInstance(inst);
-
-            if (distro.length == 1) {
-                history = (1 - alpha) * history + alpha * Math.pow(clsResult - row.get("label"), 2);
-            } else {
-                history = (1 - alpha) * history + alpha * (clsResult == row.get("label") ? 0.0 : 1.0);
-            }
-            if (data.numInstances() < capacity) {
-                // Retrain on every instance until capacity is reached
-                // after that only on demand (e.g. sensor malfunction, concept
-                // drift)
-                build();
-            } else {
-                data.delete(0);
-            }
-        }
-
-        // Classify and return result
-        return clsResult;
-    }
-}
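`MLUnit` tracks how well a model has been doing lately with an exponentially weighted moving average of its prediction error: squared error when the classifier returns a single-value distribution (regression), 0/1 loss otherwise (classification). A stand-alone sketch of just that update, with illustrative class and method names:

```java
// Sketch of MLUnit's recent-performance tracking (names are illustrative).
public class ErrorHistory {
  private static final double ALPHA = 0.1; // same smoothing factor as MLUnit
  private double history = 1.0;            // pessimistic starting value, as in MLUnit

  // Regression case: squared error of the latest prediction.
  void updateRegression(double predicted, double label) {
    history = (1 - ALPHA) * history + ALPHA * Math.pow(predicted - label, 2);
  }

  // Classification case: 0/1 loss of the latest prediction.
  void updateClassification(double predictedClass, double labelClass) {
    history = (1 - ALPHA) * history + ALPHA * (predictedClass == labelClass ? 0.0 : 1.0);
  }

  double value() {
    return history; // lower is better; evaluate() activates the model with the lowest value
  }

  public static void main(String[] args) {
    ErrorHistory h = new ErrorHistory();
    h.updateRegression(4.2, 4.0);
    h.updateClassification(1.0, 1.0);
    System.out.println(h.value());
  }
}
```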
diff --git a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearn.java b/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearn.java
deleted file mode 100644
index f7920ab2ce839e1936ed18e462827f457d24fa62..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearn.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/**
- * Copyright (c) 2010-2016 by the respective copyright holders.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- */
-package org.openhab.action.machinelearn.internal;
-
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.openhab.core.scriptengine.action.ActionDoc;
-import org.openhab.core.scriptengine.action.ParamDoc;
-
-import weka.classifiers.functions.MultilayerPerceptron;
-import weka.classifiers.trees.J48;
-
-/**
- * This class provides static methods that can be used to trigger
- * machine learning algorithms
- *
- * @author Pavel Lutskov
- * @since 1.9.0
- */
-public class MachineLearn {
-
-    // This is the model (list of classifiers, usually one or two)
-    private static List<MLUnit> clfs = new ArrayList<MLUnit>();
-
-    // Debug string for stack traces
-    private static String dbg = "";
-
-    // Debug string for program path examination
-    private static String dbg2 = "";
-
-    // Method to trigger calculation
-    @ActionDoc(text = "Run regression on given data", returns = "regression result")
-    public static synchronized double classify(
-            @ParamDoc(name = "row", text = "instance for classification") Map<String, Double> row) throws Exception {
-
-        // If there is no model for this data-point, then create a new one
-        if (!existsMatch(row.keySet())) {
-            dbg2 += ":no match found:";
-            try {
-                dbg2 += ":try to create new unit:";
-                MLUnit cls = new MLUnit(row.keySet(), MultilayerPerceptron.class, 500);
-                clfs.add(cls);
-            } catch (Exception e) {
-                dbg2 += ":fail to create new unit:";
-                handle(e);
-            }
-        }
-
-        List<Double> results = new ArrayList<Double>();
-        double fullWeight = 0.0;
-
-        // Try classification on existing models (might be extended to ensemble learning)
-        for (MLUnit cls : clfs) {
-            try {
-                dbg2 += ":try classification:";
-                results.add(cls.incoming(row) * cls.getWeight());
-                fullWeight += cls.getWeight();
-            } catch (Exception e) {
-                dbg2 += ":failed classification:";
-                results.add(0.0);
-                handle(e);
-            }
-        }
-        if (fullWeight == 0.0) {
-            throw new Exception("Classification of the instance is impossible.");
-        }
-
-        // Result is the weighted sum of outputs from all models in the list, normalized
-        // by the total weight (in this version the active model has weight 1, all others 0)
-        dbg2 += ":end results:";
-        double result = 0.0;
-        for (double d : results) {
-            result += d;
-        }
-        // fullWeight is non-zero here (checked above), so the division is safe
-        return result / fullWeight;
-    }
-
-    // Run evaluation to activate/deactivate present models
-    @ActionDoc(text = "Evaluate model on labeled data and see which model is the best")
-    public static synchronized void evaluate() {
-        double best = Double.POSITIVE_INFINITY;
-
-        MLUnit winner = null;
-        for (MLUnit cls : clfs) {
-            if (cls.getHistory() < best) {
-                best = cls.getHistory();
-                winner = cls;
-            }
-        }
-        if (winner != null) {
-            for (MLUnit cls : clfs) {
-                cls.setWeight(0.0);
-            }
-            winner.setWeight(1.0);
-        }
-    }
-
-    // Understand if there exists an appropriate model for incoming data-point
-    @ActionDoc(text = "Private method to realize that new model is necessary")
-    private static boolean existsMatch(Set<String> keys) {
-        boolean does = false;
-        for (MLUnit cls : clfs) {
-            dbg2 += ":checking unit for match::";
-            dbg2 += cls.getAttributeSet();
-            dbg2 += ":" + keys;
-            dbg2 += ":end checking unit:";
-            does = cls.isMatch(keys) || does;
-        }
-        return does;
-    }
-
-    // Store latest stack trace into dbg string
-    @ActionDoc(text = "Private method to handle occuring exceptions")
-    private static void handle(Exception e) {
-        e.printStackTrace();
-        StringWriter sw = new StringWriter();
-        e.printStackTrace(new PrintWriter(sw));
-        dbg = sw.toString();
-    }
-
-    // Method to retrieve debug info from rules
-    @ActionDoc(text = "Get some debug info", returns = "Debug string with the latest stack trace")
-    public static String getDebug() {
-        String tmp = dbg;
-        dbg = "";
-        return tmp;
-    }
-
-    @ActionDoc(text = "Get more debug info", returns = "More debug info")
-    public static String getDebu2() {
-        String tmp = dbg2;
-        dbg2 = "";
-        return tmp;
-    }
-
-    @ActionDoc(text = "Reset if something went wrong")
-    public static void reset() {
-        clfs = new ArrayList<MLUnit>();
-        dbg = "";
-        dbg2 = "";
-    }
-
-    @ActionDoc(text = "Build model from dataset")
-    public static void buildFromDataset(String path) {
-        clfs = new ArrayList<>();
-        try {
-            clfs.add(new MLUnit(path, J48.class));
-        } catch (Exception e) {
-            dbg2 += ":failed loading:";
-            handle(e);
-        }
-    }
-
-    @ActionDoc(text = "Save model to disk")
-    public static void saveModel(String path) {
-        try (FileOutputStream fos = new FileOutputStream(path); ObjectOutputStream oos = new ObjectOutputStream(fos)) {
-            dbg2 += ":saving model to " + path + ":";
-            oos.writeObject(clfs);
-        } catch (Exception e) {
-            dbg2 += ":failed saving:";
-            handle(e);
-        }
-    }
-
-    @ActionDoc(text = "Load previously stored model")
-    public static synchronized void loadModel(String path) {
-        try (FileInputStream fis = new FileInputStream(path); ObjectInputStream ois = new ObjectInputStream(fis)) {
-            dbg2 += ":loading model from " + path + ":";
-            clfs = (ArrayList<MLUnit>) ois.readObject();
-        } catch (Exception e) {
-            dbg2 += ":failed loading:";
-            handle(e);
-        }
-    }
-}
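`classify` combines the per-model outputs as a weight-normalized sum; because `evaluate` gives the winning model weight 1.0 and all others 0.0, this reduces to the active model's prediction. A minimal sketch of that combination step (names are illustrative, not the bundle's API):

```java
// Sketch of the weighted combination used in MachineLearn.classify.
public class WeightedVote {

  static double combine(double[] outputs, double[] weights) {
    double sum = 0.0;
    double totalWeight = 0.0;
    for (int i = 0; i < outputs.length; i++) {
      sum += outputs[i] * weights[i];
      totalWeight += weights[i];
    }
    if (totalWeight == 0.0) {
      // mirrors the "Classification of the instance is impossible" case
      throw new IllegalStateException("No active model");
    }
    return sum / totalWeight;
  }

  public static void main(String[] args) {
    double[] outputs = {42.0, 17.0};
    double[] weights = {1.0, 0.0}; // active model first, inactive model second
    System.out.println(combine(outputs, weights)); // 42.0
  }
}
```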
diff --git a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearnActionService.java b/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearnActionService.java
deleted file mode 100644
index 80ef9bb07fd782db187f606462a35414b3d09463..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearnActionService.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Copyright (c) 2010-2016 by the respective copyright holders.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- */
-package org.openhab.action.machinelearn.internal;
-
-import java.util.Dictionary;
-
-import org.openhab.core.scriptengine.action.ActionService;
-import org.osgi.service.cm.ConfigurationException;
-import org.osgi.service.cm.ManagedService;
-
-/**
- * This class registers an OSGi service for the Machine Learning action.
- *
- * @author Kai Kreuzer
- * @since 1.3.0
- */
-public class MachineLearnActionService implements ActionService, ManagedService {
-
-    /**
-     * Indicates whether this action is properly configured which means all
-     * necessary configurations are set. This flag can be checked by the
-     * action methods before executing code.
-     */
-    /* default */ static boolean isProperlyConfigured = false;
-
-    public MachineLearnActionService() {
-    }
-
-    public void activate() {
-    }
-
-    public void deactivate() {
-        // deallocate Resources here that are no longer needed and
-        // should be reset when activating this binding again
-    }
-
-    @Override
-    public String getActionClassName() {
-        return MachineLearn.class.getCanonicalName();
-    }
-
-    @Override
-    public Class<?> getActionClass() {
-        return MachineLearn.class;
-    }
-
-    @Override
-    @SuppressWarnings("rawtypes")
-    public void updated(Dictionary config) throws ConfigurationException {
-
-    }
-}
diff --git a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearnActivator.java b/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearnActivator.java
deleted file mode 100644
index 53a0798b6c233eb2b23a12837b50d668858ee711..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/src/main/java/org/openhab/action/machinelearn/internal/MachineLearnActivator.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Copyright (c) 2010-2016 by the respective copyright holders.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- */
-package org.openhab.action.machinelearn.internal;
-
-import org.osgi.framework.BundleActivator;
-import org.osgi.framework.BundleContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Extension of the default OSGi bundle activator
- *
- * @author Kai Kreuzer
- * @since 1.3.0
- */
-public final class MachineLearnActivator implements BundleActivator {
-
-    private static Logger logger = LoggerFactory.getLogger(MachineLearnActivator.class);
-
-    private static BundleContext context;
-
-    /**
-     * Called whenever the OSGi framework starts our bundle
-     */
-    @Override
-    public void start(BundleContext bc) throws Exception {
-        context = bc;
-        logger.debug("Machine Learning action has been started.");
-    }
-
-    /**
-     * Called whenever the OSGi framework stops our bundle
-     */
-    @Override
-    public void stop(BundleContext bc) throws Exception {
-        context = null;
-        logger.debug("Machine Learning action has been stopped.");
-    }
-
-    /**
-     * Returns the bundle context of this bundle
-     *
-     * @return the bundle context
-     */
-    public static BundleContext getContext() {
-        return context;
-    }
-
-}
diff --git a/org.openhab.action.machinelearn/src/main/resources/readme.txt b/org.openhab.action.machinelearn/src/main/resources/readme.txt
deleted file mode 100644
index 98698c670dc399dac76f7a173160545eba8f01c1..0000000000000000000000000000000000000000
--- a/org.openhab.action.machinelearn/src/main/resources/readme.txt
+++ /dev/null
@@ -1 +0,0 @@
-Bundle resources go in here!
\ No newline at end of file
diff --git a/org.openlicht.action.reinforcementlearning/.gitignore b/org.openlicht.action.reinforcementlearning/.gitignore
deleted file mode 100644
index 64c2056c6f34fbf226e041a575bdd73dff75df1f..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-build/
-/bin/
-logs/
diff --git a/org.openlicht.action.reinforcementlearning/META-INF/MANIFEST.MF b/org.openlicht.action.reinforcementlearning/META-INF/MANIFEST.MF
deleted file mode 100644
index f7583bb18d636fec33238e5fd571732ef171cf48..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,27 +0,0 @@
-Manifest-Version: 1.0
-Private-Package: org.openlicht.action.reinforcementlearning.internal
-Ignore-Package: org.openlicht.action.reinforcementlearning.internal
-Bundle-License: http://www.eclipse.org/legal/epl-v10.html
-Bundle-Name: openLicht Reinforcement Learning Action
-Bundle-SymbolicName: org.openlicht.action.reinforcementlearning
-Bundle-Version: 1.9.0.qualifier
-Bundle-Activator: org.openlicht.action.reinforcementlearning.internal.MainActivator
-Bundle-ManifestVersion: 2
-Bundle-Description: This is the Reinforcement Learning action of the open
- Home Automation Bus (openHAB)
-Import-Package: org.apache.commons.lang,
- org.eclipse.jdt.annotation,
- org.openhab.core.library.types,
- org.openhab.core.scriptengine.action,
- org.osgi.framework,
- org.osgi.service.cm,
- org.osgi.service.component,
- org.slf4j
-Bundle-DocURL: http://www.openhab.org
-Bundle-RequiredExecutionEnvironment: JavaSE-1.8
-Service-Component: OSGI-INF/action.xml
-Bundle-ClassPath: .,
- lib/encog-core-3.4.jar
-Bundle-ActivationPolicy: lazy
-Automatic-Module-Name: org.openlicht.action.reinforcementlearning
-Require-Bundle: org.eclipse.smarthome.core
diff --git a/org.openlicht.action.reinforcementlearning/OSGI-INF/action.xml b/org.openlicht.action.reinforcementlearning/OSGI-INF/action.xml
deleted file mode 100644
index ded8de6584af4ef69343ea2d29339b258eb26fba..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/OSGI-INF/action.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-
-    Copyright (c) 2010-2016 by the respective copyright holders.
-
-    All rights reserved. This program and the accompanying materials
-    are made available under the terms of the Eclipse Public License v1.0
-    which accompanies this distribution, and is available at
-    http://www.eclipse.org/legal/epl-v10.html
-
--->
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="activate" deactivate="deactivate" immediate="true" name="org.openlicht.action.reinforcementlearning.action">
-	<implementation class="org.openlicht.action.reinforcementlearning.internal.MainActionService" />
-
-	<service>
-		<provide interface="org.openhab.core.scriptengine.action.ActionService" />
-		<provide interface="org.osgi.service.cm.ManagedService" />
-	</service>
-
-	<property name="service.pid" type="String" value="org.openhab.helloworld" />
-	
-</scr:component>
diff --git a/org.openlicht.action.reinforcementlearning/build.gradle b/org.openlicht.action.reinforcementlearning/build.gradle
deleted file mode 100644
index 572c87ea95295ba93b68e1f8bae5bed2dcc0149d..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/build.gradle
+++ /dev/null
@@ -1,17 +0,0 @@
-dependencies {
-    compile files('lib/encog-core-3.4.jar')
-    compile project(':stub.org.openhab.core.scriptengine.action')
-    compile group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.annotation', version: '2.2.200'
-    compile group: 'org.osgi', name: 'org.osgi.framework', version: '1.9.0'
-    compile group: 'org.osgi', name: 'org.osgi.service.cm', version: '1.6.0'
-    compile group: 'org.osgi', name: 'org.osgi.service.component', version: '1.4.0'
-    compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'
-}
-
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/org.openlicht.action.reinforcementlearning/lib/encog-core-3.4.jar b/org.openlicht.action.reinforcementlearning/lib/encog-core-3.4.jar
deleted file mode 100644
index e78ab98f8f50dfb7855b472ba33d4c09f4fe6218..0000000000000000000000000000000000000000
Binary files a/org.openlicht.action.reinforcementlearning/lib/encog-core-3.4.jar and /dev/null differ
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/Main.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/Main.java
deleted file mode 100644
index 900fcfe6b14a4377be5cc2b405df22b0dd2daa86..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/Main.java
+++ /dev/null
@@ -1,288 +0,0 @@
-
-/**
- * Copyright (c) 2010-2016 by the respective copyright holders.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- */
-package org.openlicht.action.reinforcementlearning.internal;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.HashMap;
-
-import org.openhab.core.scriptengine.action.ActionDoc;
-import org.openhab.core.scriptengine.action.ParamDoc;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This class provides static methods that can be used in automation rules
- * for using reinforcement learning methods and connecting to openHAB.
- *
- * @author David Morales Rodríguez
- *
- *
- */
-public class Main {
-
-    private static int N_LAMPS = 3;
-    private static int N_VALUES = 3;
-    private static ReinforcementLearningAlgorithm rl_algorithm = null;
-    private static Logger logger = LoggerFactory.getLogger(Main.class);
-
-    // method to convert a string to a double. Used to read naturalLight (Home I/O uses "," as the decimal separator)
-    private static double changeStringToDouble(String string_number) {
-        double number;
-        int endIndex = string_number.indexOf(",");
-        if (endIndex > 0) {
-            number = Double.parseDouble(string_number.substring(0, endIndex));
-            if (endIndex < string_number.length() - 1) {
-                String decimal_string = string_number.substring(endIndex + 1, string_number.length());
-                double decimal = Double.parseDouble(decimal_string);
-                decimal = decimal / (Math.pow(10, decimal_string.length()));
-                number = number + decimal;
-
-            }
-
-        } else {
-            number = Double.parseDouble(string_number);
-        }
-        return number;
-
-    }
-
-    // Method to init the model
-    @ActionDoc(text = "Test Method for building the model")
-    public static synchronized void initModel() throws IOException {
-
-        logger.warn("Initializing model.");
-
-        if (rl_algorithm == null) {
-            trainAndSave();
-
-        } else {
-            logger.warn("model was already initialized.");
-
-        }
-
-    }
-
-    // Method to init the model loading it
-    @ActionDoc(text = "Test Method for building the model")
-    public static synchronized void initModel(@ParamDoc(name = "f_nnRgb") String f_nnRgb,
-            @ParamDoc(name = "f_nnBright") String f_nnBright) throws IOException {
-
-        logger.warn("Initializing model.");
-
-        if (rl_algorithm == null) {
-            rl_algorithm = new ReinforcementLearningAlgorithm(N_LAMPS, N_VALUES, f_nnRgb, f_nnBright);
-
-        } else {
-            logger.warn("model was already initialized.");
-
-        }
-    }
-
-    // Method to save the model in the following files
-    // file for nn rgb: "\\openhab-2.3.0\\datasets\\RGB_NN"
-    // file for nn bright: "\\openhab-2.3.0\\datasets\\Bright_NN"
-    @ActionDoc(text = "Test Method for saving the model")
-    public static synchronized void saveModel() throws IOException {
-        logger.warn("saving model.");
-        rl_algorithm.saveModel();
-        logger.warn("model was saved");
-
-    }
-
-    // Method to get the plan for the current state(activity, natural light)
-    @ActionDoc(text = "Test Method for getting the plan", returns = "plan")
-    public static synchronized HashMap<String, Double> planBright(@ParamDoc(name = "activity") int activity,
-            @ParamDoc(name = "natural_light") String natural_light) throws IOException {
-
-        if (rl_algorithm == null) {
-            initModel();
-        }
-
-        // natural light from 0 to 10 in HOME IO, from 0 to 100 for us
-        double light = changeStringToDouble(natural_light) * 10;
-        State state = new State(activity, light);
-
-        logger.warn("getting plan for activity: " + Integer.toString(activity) + " natural light: "
-                + Double.toString(light));
-
-        // get plan using RL
-        double aux_plan = rl_algorithm.planBright(state);
-
-        // construct hashMap to return
-        HashMap<String, Double> plan = new HashMap<String, Double>();
-
-        plan.put("lamp" + Integer.toString(activity), aux_plan);
-
-        return plan;
-    }
-
-    // training method, input: activity, natural light, ideal_value
-    @ActionDoc(text = "Test Method for training")
-    public static synchronized void trainBright(@ParamDoc(name = "activity") int activity,
-            @ParamDoc(name = "natural_light") String natural_light, @ParamDoc(name = "ideal_light") double ideal_light)
-            throws IOException {
-
-        if (rl_algorithm == null) {
-            initModel();
-        }
-
-        logger.warn("training algorithm");
-        double ideal = ideal_light;
-        // natural light from 0 to 10 in HOME IO, from 0 to 100 for us
-        State state = new State(activity, changeStringToDouble(natural_light) * 10);
-        rl_algorithm.trainAlgorithmBright(state, ideal);
-
-    }
-
-    // method for training the model using datasets and then save the model
-    private static void trainAndSave() {
-
-        rl_algorithm = new ReinforcementLearningAlgorithm(N_LAMPS, N_VALUES);
-
-        String dataset = "\\openhab-2.3.0\\datasets\\dataset.txt";
-        String output = "\\openhab-2.3.0\\datasets\\output.txt";
-
-        String line;
-        // read dataset to train our model
-        try {
-            FileReader f = new FileReader(dataset);
-            BufferedReader b = new BufferedReader(f);
-
-            logger.warn("reading dataset.");
-            FileWriter f_output = new FileWriter(output);
-            BufferedWriter bw = new BufferedWriter(f_output);
-
-            // we don't need the first line (header)
-            line = b.readLine();
-
-            // read data
-            double lamps_values[] = new double[N_VALUES];
-            // double[] planRGB;
-            double planBright;
-            int contador = 0;
-            while ((line = b.readLine()) != null) {
-                ++contador;
-                int index = line.indexOf(" ");
-                int activity = Integer.parseInt((line.substring(0, index)));
-                index = index + 1;
-                int n_index = line.indexOf(" ", index);
-                double natural_light = Double.parseDouble(line.substring(index, n_index));
-                double ideal_bright;
-                index = n_index;
-                State state = new State(activity, natural_light);
-
-                index = index + 2;
-                int last_index = index;
-
-                for (int j = 0; j < N_VALUES; j++) {
-
-                    if (j == 2) {
-                        index = line.indexOf("]", index);
-                    } else {
-                        index = line.indexOf(",", index);
-                    }
-
-                    lamps_values[j] = Integer.parseInt((line.substring(last_index, index)));
-                    index = index + 2;
-                    last_index = index;
-                }
-
-                index = line.length();
-                ideal_bright = Double.parseDouble(line.substring(last_index, index));
-                // before training
-
-                logger.warn("Before Training");
-                // planRGB = rl_algorithm.planRGB(state);
-                planBright = rl_algorithm.planBright(state);
-                logger.warn("natural light " + natural_light + " activity: " + activity + " light plan: "
-                        + Double.toString(planBright));
-                logger.warn("user values: " + Double.toString(ideal_bright));
-
-                // training the algorithm
-                // rl_algorithm.trainAlgorithmRGB(state, lamps_values);
-                rl_algorithm.trainAlgorithmBright(state, ideal_bright);
-                logger.warn("After Training");
-                planBright = rl_algorithm.planBright(state);
-                logger.warn("natural light " + natural_light + " activity: " + activity + " light plan: "
-                        + Double.toString(planBright));
-                logger.warn("user values: " + Double.toString(ideal_bright));
-
-            }
-            bw.close();
-            f_output.close();
-            b.close();
-            f.close();
-
-        } catch (Exception e) {
-            logger.error("Exception occurred during execution: {}", e.getMessage(), e);
-        }
-        rl_algorithm.saveModel();
-
-    }
-
-    /*
-     * @ActionDoc(text = "Test Method for training", returns = "algorithm trained")
-     * public static synchronized void trainRGB(@ParamDoc(name = "activity") int activity,
-     *
-     * @ParamDoc(name = "bright") String bright, @ParamDoc(name = "configuration") HSBType configuration)
-     * throws IOException {
-     * double[] users_lamps_values = new double[3];
-     *
-     * Color color = new Color(configuration.getRGB());
-     * users_lamps_values[0] = color.getRed();
-     * users_lamps_values[1] = color.getGreen();
-     * users_lamps_values[2] = color.getBlue();
-     *
-     * logger.warn("lamps values given by user: {} {} {}", Double.toString(users_lamps_values[0]),
-     * Double.toString(users_lamps_values[1]), Double.toString(users_lamps_values[2]));
-     * if (rl_algorithm == null) {
-     * initModel();
-     * }
-     * logger.warn("training algorithm");
-     *
-     * State state = new State(activity, Double.parseDouble(bright)*10);
-     * rl_algorithm.trainAlgorithmRGB(state, users_lamps_values);
-     *
-     * }
-     *
-     *
-     *
-     * @ActionDoc(text = "Test Method for getting the plan", returns = "plan")
-     * public static synchronized HashMap<String, HSBType> planRGB() throws IOException {
-     *
-     * if (rl_algorithm == null) {
-     * initModel();
-     * }
-     * logger.warn("getting plan");
-     * HashMap<String, HSBType> plan = new HashMap<String, HSBType>();
-     *
-     * double[] lamps_values = rl_algorithm.planRGB(state);
-     *
-     * for (int i = 0; i < N_LAMPS; i++) {
-     * int red = (int) Math.round(lamps_values[i][0]);
-     * int green = (int) Math.round(lamps_values[i][1]);
-     * int blue = (int) Math.round(lamps_values[i][2]);
-     * logger.warn("lamps values given by plan function: {} {} {}", red, green, blue);
-     * HSBType configuration = HSBType.fromRGB(red, green, blue);
-     * configuration.toRGB();
-     * plan.put("lamp" + Integer.toString(i), configuration);
-     *
-     * }
-     * return plan;
-     *
-     * }
-     */
-
-}
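`changeStringToDouble` converts Home I/O's comma-decimal strings by splitting the string manually. A locale-aware `NumberFormat` achieves the same conversion; the sketch below is an illustrative alternative, not the bundle's implementation.

```java
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Locale;

// Illustrative sketch: parse comma-decimal strings with a locale-aware NumberFormat.
public class CommaDecimalParser {

  static double parse(String value) throws ParseException {
    // German locale uses ',' as the decimal separator, matching Home I/O's output.
    NumberFormat format = NumberFormat.getNumberInstance(Locale.GERMANY);
    return format.parse(value.trim()).doubleValue();
  }

  public static void main(String[] args) throws ParseException {
    System.out.println(parse("7,35")); // 7.35
    System.out.println(parse("10"));   // 10.0
  }
}
```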
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MainActionService.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MainActionService.java
deleted file mode 100644
index d29584aedca854d07e321fc23c41f6e845105c8b..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MainActionService.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Copyright (c) 2010-2016 by the respective copyright holders.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- */
-package org.openlicht.action.reinforcementlearning.internal;
-
-import java.util.Dictionary;
-
-import org.openhab.core.scriptengine.action.ActionService;
-import org.osgi.service.cm.ConfigurationException;
-import org.osgi.service.cm.ManagedService;
-
-/**
- * This class registers an OSGi service for the Main action.
- *
- * @author David Morales Rodríguez
- */
-public class MainActionService implements ActionService, ManagedService {
-
-    /**
-     * Indicates whether this action is properly configured which means all
-     * necessary configurations are set. This flag can be checked by the
-     * action methods before executing code.
-     */
-    /* default */ static boolean isProperlyConfigured = false;
-
-    public MainActionService() {
-    }
-
-    public void activate() {
-    }
-
-    public void deactivate() {
-        // deallocate Resources here that are no longer needed and
-        // should be reset when activating this binding again
-    }
-
-    @Override
-    public String getActionClassName() {
-        return Main.class.getCanonicalName();
-    }
-
-    @Override
-    public Class<?> getActionClass() {
-        return Main.class;
-    }
-
-    @Override
-    @SuppressWarnings("rawtypes")
-    public void updated(Dictionary config) throws ConfigurationException {
-
-    }
-
-}
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MainActivator.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MainActivator.java
deleted file mode 100644
index 98b2c764bae1e1ad790b44baeb3ab1c2529f44bd..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MainActivator.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Copyright (c) 2010-2016 by the respective copyright holders.
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- */
-package org.openlicht.action.reinforcementlearning.internal;
-
-import org.osgi.framework.BundleActivator;
-import org.osgi.framework.BundleContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Extension of the default OSGi bundle activator
- *
- * @author David Morales Rodríguez
- */
-public final class MainActivator implements BundleActivator {
-
-    private static Logger logger = LoggerFactory.getLogger(MainActivator.class);
-
-    private static BundleContext context;
-
-    /**
-     * Called whenever the OSGi framework starts our bundle
-     */
-    @Override
-    public void start(BundleContext bc) throws Exception {
-        context = bc;
-        logger.debug("Main action has been started.");
-    }
-
-    /**
-     * Called whenever the OSGi framework stops our bundle
-     */
-    @Override
-    public void stop(BundleContext bc) throws Exception {
-        context = null;
-        logger.debug("Main action has been stopped.");
-    }
-
-    /**
-     * Returns the bundle context of this bundle
-     *
-     * @return the bundle context
-     */
-    public static BundleContext getContext() {
-        return context;
-    }
-
-}
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/Model.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/Model.java
deleted file mode 100644
index 9da77cadfcd9672a93cc92ba8d442b4b8504985c..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/Model.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package org.openlicht.action.reinforcementlearning.internal;
-
-import java.util.Random;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-// Abstract base class for the reinforcement learning models
-public abstract class Model {
-    protected int N_VALUES;
-    protected int N_LAMPS;
-    protected Random rnd = new Random();
-    protected static Logger logger = LoggerFactory.getLogger(Model.class);
-
-    public Model(int N_LAMPS, int N_VALUES) {
-        this.N_LAMPS = N_LAMPS;
-        this.N_VALUES = N_VALUES;
-        rnd.setSeed(System.currentTimeMillis());
-    }
-
-    // method to clamp lamp values (RGB) to the range [0, 255]
-    protected double[] validateLampsValues(double[] l_values) {
-        for (int i = 0; i < this.N_VALUES; ++i) {
-            if (l_values[i] > 255) {
-                l_values[i] = 255;
-            }
-
-            if (l_values[i] < 0) {
-                l_values[i] = 0;
-            }
-
-        }
-        return l_values;
-    }
-
-    // method to clamp the brightness value to the range [0, 100]
-    protected double validateBrightValue(double bright_value) {
-
-        if (bright_value > 100) {
-            bright_value = 100;
-        }
-
-        if (bright_value < 0) {
-            bright_value = 0;
-        }
-
-        return bright_value;
-    }
-
-    // train the model for the current state using the brightness value given by the user
-    public abstract void trainBright(State state, double bright);
-
-    // train the model for the current state using the RGB values given by the user
-    public abstract void trainRGB(State state, double[] user_lamps_values);
-
-    // get output (lamps_values (RGB)) using exploitation
-    public abstract double[] getPlanRGB(State state);
-
-    // get output (brightness value) using exploitation
-    public abstract double getPlanBright(State state);
-
-    // method to save the model
-    public abstract void saveModel();
-
-}
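`validateLampsValues` and `validateBrightValue` clamp values into fixed ranges ([0, 255] per RGB channel, [0, 100] for brightness). The same clamping expressed with `Math.min`/`Math.max`, as an illustrative sketch with hypothetical names:

```java
import java.util.Arrays;

// Sketch of the clamping performed by validateLampsValues and validateBrightValue.
public class Clamp {

  static double clamp(double value, double min, double max) {
    return Math.max(min, Math.min(max, value));
  }

  static double[] clampRgb(double[] values) {
    double[] out = new double[values.length];
    for (int i = 0; i < values.length; i++) {
      out[i] = clamp(values[i], 0, 255); // RGB channel range
    }
    return out;
  }

  static double clampBrightness(double value) {
    return clamp(value, 0, 100); // brightness in percent
  }

  public static void main(String[] args) {
    System.out.println(clampBrightness(140.0));                               // 100.0
    System.out.println(Arrays.toString(clampRgb(new double[] {-3, 128, 300}))); // [0.0, 128.0, 255.0]
  }
}
```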
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MultilayerPerceptron_Model.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MultilayerPerceptron_Model.java
deleted file mode 100644
index 5ee2ed452a837a84d28e3c9887facb1bb9b5a5d0..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/MultilayerPerceptron_Model.java
+++ /dev/null
@@ -1,236 +0,0 @@
-package org.openlicht.action.reinforcementlearning.internal;
-
-import java.io.File;
-
-import org.encog.engine.network.activation.ActivationSigmoid;
-import org.encog.ml.data.MLDataSet;
-import org.encog.ml.data.basic.BasicMLDataSet;
-import org.encog.ml.train.MLTrain;
-import org.encog.neural.networks.BasicNetwork;
-import org.encog.neural.networks.layers.BasicLayer;
-import org.encog.neural.networks.training.propagation.back.Backpropagation;
-import org.encog.persist.EncogDirectoryPersistence;
-
-// This class implements an RL model using two neural networks built with Encog
-public class MultilayerPerceptron_Model extends Model {
-
-    // first NN to get rgb configuration
-    private BasicNetwork RGB_network;
-    // second NN to get bright configuration
-    private BasicNetwork Bright_network;
-
-    // constructor
-    public MultilayerPerceptron_Model(int N_LAMPS, int N_VALUES) {
-
-        super(N_LAMPS, N_VALUES);
-
-        // NN to predict RGB values
-        this.RGB_network = new BasicNetwork();
-
-        // no activation function,true->bias neurons, input layer has 2 neurons (activity, natural_light)
-        this.RGB_network.addLayer(new BasicLayer(null, true, 2));
-
-        // Sigmoid activation function,true->bias neurons, hidden layer has 6 neurons
-        this.RGB_network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
-
-        // Sigmoid activation function, false -> no bias neurons, output layer has 3 neurons (RGB values)
-        this.RGB_network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 3));
-        this.RGB_network.getStructure().finalizeStructure();
-        this.RGB_network.reset();
-
-        // NN to predict bright
-        this.Bright_network = new BasicNetwork();
-
-        // no activation function,true->bias neurons, input layer has 2 neurons (activity, natural_light)
-        this.Bright_network.addLayer(new BasicLayer(null, true, 2));
-
-        // Sigmoid activation function,true->bias neurons, hidden layer has 10 neurons
-        this.Bright_network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 10));
-
-        // Sigmoid activation function, false -> no bias neurons, output layer has 1 neuron (brightness value)
-        this.Bright_network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
-        this.Bright_network.getStructure().finalizeStructure();
-        this.Bright_network.reset();
-
-    }
-
-    // constructor with files to initialize NNs
-    public MultilayerPerceptron_Model(int N_LAMPS, int N_VALUES, String file_NNrgb, String file_NNbright) {
-
-        super(N_LAMPS, N_VALUES);
-        logger.warn("trying to load nn");
-        this.RGB_network = (BasicNetwork) EncogDirectoryPersistence.loadObject(new File(file_NNrgb));
-        this.Bright_network = (BasicNetwork) EncogDirectoryPersistence.loadObject(new File(file_NNbright));
-        logger.warn("nn initialized");
-
-    }
-
-    // method to save the model
-    @Override
-    public void saveModel() {
-
-        // save model
-        EncogDirectoryPersistence.saveObject(new File("\\openhab-2.3.0\\datasets\\RGB_NN"), this.RGB_network);
-
-        EncogDirectoryPersistence.saveObject(new File("\\openhab-2.3.0\\datasets\\Bright_NN"), this.Bright_network);
-
-    }
-
-    /////////////////////////////////////// aux
-    /////////////////////////////////////// functions////////////////////////////////////////////////////////////////
-
-    // function to normalize values. NN get values for a sigmoid function
-    private double normalizeActivityPlanRGB(int activity) {
-        return (activity - 2) * 1;
-
-    }
-
-    // function to normalize values. NN get values for a sigmoid function
-    private double normalizeNatural_lightPlanRGB(double natural_light) {
-        return ((natural_light - 50) / 8);
-    }
-
-    // shifts the activity id so the sigmoid network input is centered around zero
-    private double normalizeActivityPlanBright(int activity) {
-        return activity - 2;
-    }
-
-    // centers the natural light value around 50 and scales it down for the sigmoid inputs
-    private double normalizeNatural_lightPlanBright(double natural_light) {
-        return (natural_light - 50) / 8;
-    }
-
-    ///////////////////////////////// train methods /////////////////////////////////
-
-    // train model for brightness prediction
-    @Override
-    public void trainBright(State state, double ideal_bright) {
-        double[][] IDEAL = new double[1][1];
-        // scale the brightness percentage to [0, 1] for the NN
-        IDEAL[0][0] = ideal_bright / 100;
-
-        // normalize values for the sigmoid inputs
-        double activity_lamp = this.normalizeActivityPlanBright(state.getActivity());
-        double natural_light = this.normalizeNatural_lightPlanBright(state.getNatural_light());
-
-        double[][] INPUT = new double[1][2];
-        INPUT[0][0] = activity_lamp;
-        INPUT[0][1] = natural_light;
-
-        // training using backpropagation
-        MLDataSet trainingSet = new BasicMLDataSet(INPUT, IDEAL);
-        // learning rate of 3.5 and a momentum of 0.3
-        MLTrain train = new Backpropagation(this.Bright_network, trainingSet, 3.5, 0.3);
-        int epoch = 1;
-        do {
-            train.iteration();
-            // System.out.println("Epoch #" + epoch + "Error:" + train.getError());
-            epoch++;
-        } while (train.getError() > 0.005);
-
-        train.finishTraining();
-
-        // test the neural network
-        // System.out.println("Bright Neural Network Results:");
-        /*
-         * for (MLDataPair pair : trainingSet) {
-         * final MLData output = this.Bright_network.compute(pair.getInput());
-         * System.out.println(
-         * "INPUT= [" + pair.getInput().getData(0) + ", " + pair.getInput().getData(1) + "], actual= ["
-         * + output.getData(0) * 100 + "] ,ideal= [" + pair.getIdeal().getData(0) * 100 + "]");
-         * }
-         */
-    }
-
-    // train model for RGB prediction
-    @Override
-    public void trainRGB(State state, double[] user_lamps_values) {
-
-        double[][] IDEAL = new double[1][3];
-
-        // scale RGB values from 0..255 down to 0..1 for the NN
-        for (int i = 0; i < this.N_VALUES; i++) {
-            IDEAL[0][i] = user_lamps_values[i] / 255;
-        }
-
-        double activity_lamp = this.normalizeActivityPlanRGB(state.getActivity());
-        double natural_light = this.normalizeNatural_lightPlanRGB(state.getNatural_light());
-        double[][] INPUT = new double[1][2];
-        INPUT[0][0] = activity_lamp;
-        INPUT[0][1] = natural_light;
-
-        // training using backpropagation
-        MLDataSet trainingSet = new BasicMLDataSet(INPUT, IDEAL);
-        // learning rate of 3.5 and a momentum of 0.3
-        MLTrain train = new Backpropagation(this.RGB_network, trainingSet, 3.5, 0.3);
-        int epoch = 1;
-        do {
-            train.iteration();
-            System.out.println("Epoch #" + epoch + "Error:" + train.getError());
-            epoch++;
-        } while (train.getError() > 0.005);
-
-        train.finishTraining();
-
-        // test the neural network
-        /*
-         * System.out.println("RGB Neural Network Results:");
-         * for (MLDataPair pair : trainingSet) {
-         * final MLData output = this.RGB_network.compute(pair.getInput());
-         * System.out.println("INPUT= [" + pair.getInput().getData(0) + ", " + pair.getInput().getData(1)
-         * + "], actual= [" + output.getData(0) * 255 + ", " + output.getData(1) * 255 + ", "
-         * + output.getData(2) * 255 + "] ,ideal= [" + pair.getIdeal().getData(0) * 255 + ", "
-         * + pair.getIdeal().getData(1) * 255 + ", " + pair.getIdeal().getData(2) * 255 + "]");
-         * }
-         */
-    }
-
-    ///////////////////////////////////////////// exploitation methods /////////////////////////////////////////////
-
-    // exploitation method for RGB values
-    @Override
-    public double[] getPlanRGB(State state) {
-
-        double[] output = new double[this.N_VALUES];
-
-        // compute() expects plain double arrays for input and output
-        double[] input = new double[2];
-        input[0] = this.normalizeActivityPlanRGB(state.getActivity());
-        input[1] = this.normalizeNatural_lightPlanRGB(state.getNatural_light());
-
-        // get output
-        this.RGB_network.compute(input, output);
-
-        // scale the sigmoid outputs back to 0..255 RGB values
-        for (int i = 0; i < this.N_VALUES; i++) {
-            output[i] = output[i] * 255;
-        }
-
-        return output;
-
-    }
-
-    // exploitation method for brightness values
-    @Override
-    public double getPlanBright(State state) {
-
-        double[] aux = new double[1];
-        double output;
-
-        // compute() expects plain double arrays for input and output
-        double[] input = new double[2];
-        input[0] = this.normalizeActivityPlanBright(state.getActivity());
-        input[1] = this.normalizeNatural_lightPlanBright(state.getNatural_light());
-
-        // get output
-        this.Bright_network.compute(input, aux);
-
-        // scale the sigmoid output back to a 0..100 brightness value and validate it
-        output = validateBrightValue(100 * aux[0]);
-
-        return output;
-
-    }
-
-}
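For reference, the following minimal sketch replays the Encog pattern used by the deleted MultilayerPerceptron_Model for the brightness network: build a small sigmoid MLP, fit a single (state, feedback) pair with backpropagation, and query it. The class name, the example values (activity 3, 58 % natural light, 80 % target brightness) and the output path are illustrative and not taken from the project.

import java.io.File;

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.train.MLTrain;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.persist.EncogDirectoryPersistence;

public class BrightnessNetworkSketch {

    public static void main(String[] args) {
        // 2 inputs (activity, natural light) -> 10 hidden sigmoid neurons -> 1 sigmoid output
        BasicNetwork network = new BasicNetwork();
        network.addLayer(new BasicLayer(null, true, 2));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 10));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
        network.getStructure().finalizeStructure();
        network.reset();

        // one training example, normalized like the class above: activity 3, 58 % natural light
        double[] input = { 3 - 2, (58.0 - 50) / 8 };
        double[][] inputs = { input };
        double[][] ideals = { { 80.0 / 100 } }; // target brightness scaled to [0, 1]
        MLDataSet trainingSet = new BasicMLDataSet(inputs, ideals);

        // backpropagation with the same learning rate (3.5) and momentum (0.3) as above
        MLTrain train = new Backpropagation(network, trainingSet, 3.5, 0.3);
        do {
            train.iteration();
        } while (train.getError() > 0.005);
        train.finishTraining();

        // query the trained network and scale the sigmoid output back to a percentage
        double[] output = new double[1];
        network.compute(input, output);
        System.out.println("predicted brightness: " + output[0] * 100);

        // persist the trained network to disk (illustrative path)
        EncogDirectoryPersistence.saveObject(new File("Bright_NN"), network);
    }
}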
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/RLAgent.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/RLAgent.java
deleted file mode 100644
index dd78a6052a3eb48ae56c5eff470432396afd2769..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/RLAgent.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package org.openlicht.action.reinforcementlearning.internal;
-
-import java.util.Random;
-
-/* This class defines an RL agent. */
-public class RLAgent {
-    private Random rnd = new Random();
-
-    // exploration probability for epsilon-greedy action selection (0 disables exploration)
-    private static final double EPSILON = 0;
-
-    // variables that define the environment
-    // N_LAMPS: number of lamps
-    // N_VALUES: number of values per lamp
-    private int N_LAMPS;
-    private int N_VALUES;
-
-    private MultilayerPerceptron_Model model;
-
-    // method to save the model
-    public void saveModel() {
-        this.model.saveModel();
-    }
-
-    // constructor without initialized NN files
-    public RLAgent(int n_lamps, int n_values) {
-        rnd.setSeed(System.currentTimeMillis());
-        this.N_LAMPS = n_lamps;
-        this.N_VALUES = n_values;
-        model = new MultilayerPerceptron_Model(this.N_LAMPS, this.N_VALUES);
-    }
-
-    // constructor with initialized NN files
-    public RLAgent(int n_lamps, int n_values, String file_NNrgb, String file_NNbright) {
-        rnd.setSeed(System.currentTimeMillis());
-        this.N_LAMPS = n_lamps;
-        this.N_VALUES = n_values;
-        model = new MultilayerPerceptron_Model(this.N_LAMPS, this.N_VALUES, file_NNrgb, file_NNbright);
-    }
-
-    // called from the ReinforcementLearningAlgorithm class to train the agent
-    // state: the current state
-    // ideal_bright: the ideal brightness value
-    public void trainingAlgorithmBright(State state, double ideal_bright) {
-
-        // TRAIN THE SYSTEM
-        model.trainBright(state, ideal_bright);
-    }
-
-    // called from the ReinforcementLearningAlgorithm class to train the agent
-    // state: the current state
-    // user_lamps_values: the lamp values chosen by the user
-    public void trainingAlgorithmRGB(State state, double[] user_lamps_values) {
-
-        // TRAIN THE SYSTEM
-        model.trainRGB(state, user_lamps_values);
-    }
-
-    // called from the ReinforcementLearningAlgorithm class to get an RGB configuration for the lamps
-    public double[] getPlanRGB(State state) {
-
-        if (decide_if_exploration()) {// do exploration
-            // not implemented
-            return null;
-        } else {// do exploitation
-            return model.getPlanRGB(state);
-        }
-
-    }
-
-    // called from the ReinforcementLearningAlgorithm class to get a brightness configuration for the lamps
-    public double getPlanBright(State state) {
-
-        if (decide_if_exploration()) {// do exploration
-            // not implemented
-            return -1;
-        } else {// do exploitation
-
-            return model.getPlanBright(state);
-        }
-
-    }
-
-    // decides whether to do exploration (true) or exploitation (false)
-    private boolean decide_if_exploration() {
-
-        boolean exploration = false;
-        if (rnd.nextFloat() < EPSILON) {
-            exploration = true;
-        }
-
-        return exploration;
-
-    }
-
-}
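The exploration branch of RLAgent is left unimplemented and EPSILON is fixed to 0, so the agent always exploits. The sketch below shows one hypothetical way the epsilon-greedy decision could return a random RGB plan instead of null; the non-zero EPSILON and the random fallback are assumptions for illustration, not part of the deleted code.

import java.util.Random;

public class EpsilonGreedySketch {

    private static final double EPSILON = 0.1; // illustrative; RLAgent above uses 0
    private static final int N_VALUES = 3;     // R, G, B
    private final Random rnd = new Random();

    // returns a random RGB plan with probability EPSILON, otherwise the exploitation plan
    public double[] getPlanRGB(double[] exploitationPlan) {
        if (rnd.nextFloat() < EPSILON) {
            // exploration: pick a random value in 0..255 for each channel
            double[] randomPlan = new double[N_VALUES];
            for (int i = 0; i < N_VALUES; i++) {
                randomPlan[i] = rnd.nextInt(256);
            }
            return randomPlan;
        }
        // exploitation: keep the plan computed by the model
        return exploitationPlan;
    }
}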
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/ReinforcementLearningAlgorithm.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/ReinforcementLearningAlgorithm.java
deleted file mode 100644
index ffa9ad2163642e48a377f320573bed0dff34b3f5..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/ReinforcementLearningAlgorithm.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package org.openlicht.action.reinforcementlearning.internal;
-
-/* This class implements a reinforcement learning algorithm. */
-public class ReinforcementLearningAlgorithm {
-
-    // reinforcement learning agent
-    private RLAgent agent;
-    private int N_LAMPS;
-    private int N_VALUES;
-
-    // constructor without initialized NN files
-    public ReinforcementLearningAlgorithm(int n_lamps, int n_values) {
-
-        this.N_LAMPS = n_lamps;
-        this.N_VALUES = n_values;
-
-        agent = new RLAgent(N_LAMPS, N_VALUES);
-    }
-
-    // constructor with initialized NN files
-    public ReinforcementLearningAlgorithm(int n_lamps, int n_values, String file_NNrgb, String file_NNbright) {
-
-        this.N_LAMPS = n_lamps;
-        this.N_VALUES = n_values;
-
-        agent = new RLAgent(N_LAMPS, N_VALUES, file_NNrgb, file_NNbright);
-    }
-
-    // train the algorithm with the RGB feedback given by the user
-    public void trainAlgorithmRGB(State state, double[] users_lamps_values) {
-        this.agent.trainingAlgorithmRGB(state, users_lamps_values);
-    }
-
-    // train the algorithm with the brightness feedback given by the user
-    public void trainAlgorithmBright(State state, double ideal_bright) {
-        this.agent.trainingAlgorithmBright(state, ideal_bright);
-    }
-
-    // get plan (RGB)
-    public double[] planRGB(State state) {
-        return this.agent.getPlanRGB(state);
-
-    }
-
-    // get plan (brightness)
-    public double planBright(State state) {
-        return this.agent.getPlanBright(state);
-
-    }
-
-    // method to save the model
-    public void saveModel() {
-        this.agent.saveModel();
-    }
-}
\ No newline at end of file
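For context, a short usage sketch of the facade above: construct the algorithm, build a State, train with user feedback and request plans. The lamp count, sensor readings and feedback values are made-up numbers, and the sketch assumes it sits in the same internal package as the deleted classes.

package org.openlicht.action.reinforcementlearning.internal;

import java.util.Arrays;

public class ReinforcementLearningUsageSketch {

    public static void main(String[] args) {
        // one lamp with three RGB channels
        ReinforcementLearningAlgorithm rl = new ReinforcementLearningAlgorithm(1, 3);

        // state: activity id 2, 55 % natural light
        State state = new State(2, 55.0);

        // train from user feedback: chosen RGB values and an ideal brightness of 70 %
        rl.trainAlgorithmRGB(state, new double[] { 200, 180, 120 });
        rl.trainAlgorithmBright(state, 70.0);

        // query the learned plans
        System.out.println("RGB plan: " + Arrays.toString(rl.planRGB(state)));
        System.out.println("Brightness plan: " + rl.planBright(state));

        // persist both networks (writes to the hard-coded paths in MultilayerPerceptron_Model)
        rl.saveModel();
    }
}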
diff --git a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/State.java b/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/State.java
deleted file mode 100644
index 5b0e00b93f22bfef8a4616680dfe92fb6ebc0966..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/java/org/openlicht/action/reinforcementlearning/internal/State.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package org.openlicht.action.reinforcementlearning.internal;
-
-// class defining the state (activity, natural light)
-public class State {
-    // the user's activity
-    private int activity;
-    private double natural_light;
-
-    public State(int activity, double natural_light) {
-        this.natural_light = natural_light;
-        this.activity = activity;
-
-    }
-
-    public int getActivity() {
-        return this.activity;
-
-    }
-
-    public double getNatural_light() {
-        return this.natural_light;
-    }
-}
\ No newline at end of file
diff --git a/org.openlicht.action.reinforcementlearning/src/main/resources/readme.txt b/org.openlicht.action.reinforcementlearning/src/main/resources/readme.txt
deleted file mode 100644
index 98698c670dc399dac76f7a173160545eba8f01c1..0000000000000000000000000000000000000000
--- a/org.openlicht.action.reinforcementlearning/src/main/resources/readme.txt
+++ /dev/null
@@ -1 +0,0 @@
-Bundle resources go in here!
\ No newline at end of file
diff --git a/settings.gradle b/settings.gradle
index 0a118ec458b406dfae388bf1218c2abd4bfd356b..3dd742cc3db674b63a4f2a43a1651ecf773747ae 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -5,19 +5,13 @@ include 'openhab-mock'
 include 'integration'
 include ':benchmark'
 include ':commons.color'
-include ':skywriter-hue-integration'
-include ':org.openhab.action.machinelearn'
-include ':org.openlicht.action.reinforcementlearning'
-include ':stub.org.openhab.core.scriptengine.action'
 include ':feedbackloop.analyze'
 include ':feedbackloop.plan'
 include ':feedbackloop.execute'
 include ':feedbackloop.api'
 include ':feedbackloop.main'
-include ':ml_test'
 include ':feedbackloop.monitor'
 include ':feedbackloop.learner'
-include ':influx_test'
 include ':eraser.spark'
 include ':eraser.starter'
 include ':feedbackloop.learner_backup'
diff --git a/stub.org.openhab.core.scriptengine.action/.gitignore b/stub.org.openhab.core.scriptengine.action/.gitignore
deleted file mode 100644
index 70b583e34c3316bcd77c807e2d6b85db5e7d49f6..0000000000000000000000000000000000000000
--- a/stub.org.openhab.core.scriptengine.action/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/build/
-/bin/
-logs/
diff --git a/stub.org.openhab.core.scriptengine.action/build.gradle b/stub.org.openhab.core.scriptengine.action/build.gradle
deleted file mode 100644
index 128c3aa9c93c8dc9dd4aff8a245930bfb21b555a..0000000000000000000000000000000000000000
--- a/stub.org.openhab.core.scriptengine.action/build.gradle
+++ /dev/null
@@ -1,7 +0,0 @@
-sourceSets {
-    main {
-        java {
-            srcDir 'src/main/java'
-        }
-    }
-}
diff --git a/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ActionDoc.java b/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ActionDoc.java
deleted file mode 100644
index 02434e332e6759ef1e2f3ceb0d43786888a3a3e2..0000000000000000000000000000000000000000
--- a/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ActionDoc.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package org.openhab.core.scriptengine.action;
-
-import java.lang.annotation.*;
-
-/**
- * Stub to make projects build using ActionDoc.
- *
- * @author rschoene - Initial contribution
- */
-@Target(ElementType.METHOD)
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-public @interface ActionDoc {
-  String text();
-
-  String returns() default "";
-}
diff --git a/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ActionService.java b/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ActionService.java
deleted file mode 100644
index 877f7b8091527b28a198e1f84027d92611198f51..0000000000000000000000000000000000000000
--- a/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ActionService.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package org.openhab.core.scriptengine.action;
-
-/**
- * Stub to make projects build using ActionService.
- *
- * @author rschoene - Initial contribution
- */
-public interface ActionService {
-
-  /**
-   * Returns the fully qualified class name (FQCN) of the action class.
-   *
-   * @return the FQCN of the action class
-   */
-  String getActionClassName();
-
-  /**
-   * Returns the action class itself
-   *
-   * @return the action class
-   */
-  Class<?> getActionClass();
-
-}
diff --git a/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ParamDoc.java b/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ParamDoc.java
deleted file mode 100644
index e9134a198d4c84f1cb76a663ccaeea2ce522242e..0000000000000000000000000000000000000000
--- a/stub.org.openhab.core.scriptengine.action/src/main/java/org/openhab/core/scriptengine/action/ParamDoc.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package org.openhab.core.scriptengine.action;
-
-import java.lang.annotation.*;
-
-/**
- * Stub to make projects build using ParamDoc.
- *
- * @author rschoene - Initial contribution
- */
-@Target(ElementType.PARAMETER)
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-public @interface ParamDoc {
-  String name();
-
-  String text() default "";
-}
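The stub annotations above only exist so that action bundles compile; the hypothetical action class below illustrates how such a bundle would annotate a method with @ActionDoc and its parameters with @ParamDoc. The class and method are invented for demonstration and are not part of the deleted sources.

package org.openhab.core.scriptengine.action;

public class DimLampAction {

    @ActionDoc(text = "Sets the brightness of a lamp.", returns = "true if the command was accepted")
    public static boolean dimLamp(
            @ParamDoc(name = "itemName", text = "name of the lamp item") String itemName,
            @ParamDoc(name = "percent", text = "target brightness in percent") int percent) {
        // a real action would send a command to openHAB here; this stub just reports success
        return true;
    }
}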
diff --git a/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml b/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml
deleted file mode 100644
index 867ec439d0a32dcb5f8b3e2d0c7485d7d8da418c..0000000000000000000000000000000000000000
--- a/stub.org.openhab.core.scriptengine.action/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration>
-    <Appenders>
-        <Console name="Console">
-            <PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
-        </Console>
-        <RollingFile name="RollingFile" fileName="logs/eraser.log"
-                    filePattern="logs/eraser-%i.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
-            <Policies>
-                <OnStartupTriggeringPolicy/>
-            </Policies>
-            <DefaultRolloverStrategy max="20"/>
-        </RollingFile>
-    </Appenders>
-    <Loggers>
-        <Root level="debug">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="RollingFile"/>
-        </Root>
-    </Loggers>
-</Configuration>