Skip to content
Snippets Groups Projects
Commit 6c394865 authored by René Schöne's avatar René Schöne
Browse files

Remove unused modules.

parent 7cb455b6
Branches
No related tags found
1 merge request!19dev to master
Showing
with 0 additions and 1140 deletions
encog,BasicNetwork,java,3.4.0,1,1554196571101
[BASIC]
[BASIC:PARAMS]
[BASIC:NETWORK]
beginTraining=0
connectionLimit=0
contextTargetOffset=0,0,0
contextTargetSize=0,0,0
endTraining=2
hasContext=f
inputCount=4
layerCounts=1,8,5
layerFeedCounts=1,7,4
layerContextCount=0,0,0
layerIndex=0,1,9
output=0.2537517424,0.3154675575,-0.8739039638,-0.4408848221,-0.8484433638,-0.999915299,-0.6964984771,-0.208278439,1,0,0,-0.4545454545,0.3559322034,1
outputCount=1
weightIndex=0,8,43
weights=0.5976774048,-0.7925906525,0.7127327881,-0.9611660362,0.8031350986,-0.7286657218,1.0990482817,-0.5985785536,-0.0783115433,0.575612931,1.1267500918,1.7184744034,0.2271044512,-1.0525796764,0.0900869671,1.1492323512,0.6141715555,-1.0455927965,-0.0925453451,0.2471651431,2.3634316872,0.3939369257,0.4607437082,-0.1435186798,0.8428535365,-0.0848896791,-0.070602589,-1.2640263565,2.4899996734,-0.2185394776,10.3421332361,-0.1650898311,-0.2750133571,-0.79680959,-0.8051139953,0.8219933747,-0.0727160299,-0.4609522002,-1.0410685492,-0.5354063412,0.3028724456,-0.6835374219,0.169591233
biasActivation=0,1,1
[BASIC:ACTIVATION]
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationLinear"
encog,BasicNetwork,java,3.4.0,1,1548158734516
[BASIC]
[BASIC:PARAMS]
[BASIC:NETWORK]
beginTraining=0
connectionLimit=0
contextTargetOffset=0,0,0
contextTargetSize=0,0,0
endTraining=2
hasContext=f
inputCount=4
layerCounts=4,8,5
layerFeedCounts=4,7,4
layerContextCount=0,0,0
layerIndex=0,4,12
output=0.6991387348,-0.8711034513,-0.996886038,-0.832747291,-0.0935682806,-0.9996163977,0.5399150265,0.9411173394,-0.5084989975,0.4850010791,0.9999999957,1,0,-0.6666666667,-0.4545454545,0.6949152542,1
outputCount=4
weightIndex=0,32,67
weights=-2.6901880743,0.6512821123,-1.2270002115,1.63124668,0.1982387305,-0.2994789552,1.5833040739,-0.9450411677,2.0541422847,-0.718279397,-1.1761952241,0.5028631512,0.0690323612,-1.496141565,-0.1955149568,-0.7453976822,-0.3691141073,0.9854755554,2.2113850088,-1.5216550292,0.9652087936,-1.3028209693,-1.3346156171,0.4142247818,1.0821207364,0.1987534858,0.6202881884,-0.2940331887,-1.4643282498,2.6960334656,-0.0167663298,-2.9907087565,0.3469960227,-0.0441249736,-2.5998575813,-0.7106361301,-0.8111809962,2.2216158678,-0.5482762437,-1.7996398291,-3.6734127565,-2.9102547958,0.4845401914,0.3760471288,-0.0124987546,0.3784047483,0.5860932613,-0.2682876707,0.7429004186,-7.559247176,-3.4421363532,1.1989747484,-2.3340717496,-1.4740773042,-0.7795788072,-1.8241693655,-0.630132295,-0.8191869009,-0.4060569987,-1.0997423162,-0.5495165849,0.1407829068,-2.2964930412,0.0798893221,-19.5271913755,2.0474187009,-0.2622671892
biasActivation=0,1,1
[BASIC:ACTIVATION]
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationLinear"
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
filePattern="logs/jastadd-mquat-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
/build/
/bin/
logs/
// Gradle build for the ml_test demo module.
// 'application' adds the run task and distribution packaging.
apply plugin: 'application'
dependencies {
// The JastAdd smart-home model this demo builds on.
compile project(':eraser-base')
}
run {
mainClassName = 'de.tudresden.inf.st.eraser.ml_test.Main'
standardInput = System.in
// Pass CLI arguments via: ./gradlew run -PappArgs="['arg1','arg2']"
if (project.hasProperty("appArgs")) {
args Eval.me(appArgs)
}
}
sourceSets {
main {
java {
srcDir 'src/main/java'
}
}
}
package de.tudresden.inf.st.eraser.ml_test;
import de.tudresden.inf.st.eraser.jastadd.model.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Demo application that builds two small neural networks (brightness and RGB color
 * prediction) on top of the JastAdd smart-home model and times repeated classifications.
 */
public class Main {

    private static final Logger logger = LogManager.getLogger(Main.class);

    /** Number of timed classification runs per network. */
    private static final int REPETITIONS = 20;
    /** If set, one random hidden-neuron output weight is perturbed after each run. */
    private static final boolean CHANGE_WEIGHTS_IN_BETWEEN = true;
    /** Fixed seed so the weight perturbations are reproducible across runs. */
    private static final Random random = new Random(0);

    public static void main(String[] args) {
        logger.info("Hello World!");
        createAndTestBrightnessNetwork();
        createAndTestColorNetwork();
    }

    /**
     * Builds the smart-home model: one group containing the two sensor items
     * ("activity" and "brightness") that feed the networks' input neurons.
     */
    private static Root createModel() {
        Root root = Root.createEmptyRoot();
        Group group = new Group();
        group.setID("Group1");
        root.getSmartHomeEntityModel().addGroup(group);
        Item activityItem = newItem("activity", "Recognized activity", false, 8);
        Item brightnessItem = newItem("brightness", "Measured brightness", false, 5);
        group.addItem(activityItem);
        group.addItem(brightnessItem);
        return root;
    }

    /**
     * Creates a {@link NumberItem} with the given id, label, send-state flag
     * and initial numeric state.
     */
    private static NumberItem newItem(String id, String label, boolean defaultSendState, int initialState) {
        NumberItem item = new NumberItem();
        item.setID(id);
        item.setLabel(label);
        if (defaultSendState) {
            item.enableSendState();
        } else {
            item.disableSendState();
        }
        item.setState(initialState);
        return item;
    }

    /**
     * Runs {@code classify} {@link #REPETITIONS} times on the given network and logs
     * every result plus per-run, average and median wall-clock times. Optionally
     * mutates one random hidden-neuron output weight between runs and flushes the
     * attribute cache so the next run cannot reuse cached results.
     *
     * @param nn           the network to classify with
     * @param classify     the classification entry point to benchmark
     * @param leafToString renders a classification result for logging
     */
    private static void classifyTimed(
            NeuralNetworkRoot nn,
            Function<NeuralNetworkRoot, DoubleNumber> classify,
            Function<DoubleNumber, String> leafToString) {
        List<String> results = new ArrayList<>();
        List<Long> times = new ArrayList<>();
        for (int i = 0; i < REPETITIONS; i++) {
            long before = System.nanoTime();
            DoubleNumber classification = classify.apply(nn);
            long diff = System.nanoTime() - before;
            results.add(leafToString.apply(classification));
            times.add(TimeUnit.NANOSECONDS.toMillis(diff));
            if (CHANGE_WEIGHTS_IN_BETWEEN) {
                // Perturb a random connection weight to invalidate incrementally cached values.
                HiddenNeuron hiddenNeuron = nn.getHiddenNeuron(random.nextInt(nn.getNumHiddenNeuron()));
                NeuronConnection connection = hiddenNeuron.getOutput(random.nextInt(hiddenNeuron.getNumOutput()));
                connection.setWeight(hiddenNeuron.getNumOutput() * random.nextDouble());
                nn.flushTreeCache();
            }
        }
        logger.info("Classification results: {}", results);
        logger.info("Took {}ms", times.stream().map(l -> Long.toString(l)).collect(Collectors.joining("ms, ")));
        logger.info("Took on average: {}ms",
                Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).average().orElse(-1));
        // Median: skip to the middle of the sorted times, then average the one (odd
        // REPETITIONS) or two (even REPETITIONS) central elements.
        logger.info("Took on median: {}ms",
                Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).sorted()
                        .skip((REPETITIONS - 1) / 2).limit(2 - REPETITIONS % 2).average().orElse(Double.NaN));
    }

    /** Value object bundling everything {@link #prepareNetwork()} creates. */
    private static class PreparationResult {
        OutputLayer outputLayer;
        DoubleArrayDoubleFunction sigmoid;
        InputNeuron activity;
        InputNeuron brightness;
        NeuralNetworkRoot nn;
        HiddenNeuron[] hiddenNeurons;

        PreparationResult(OutputLayer outputLayer, DoubleArrayDoubleFunction sigmoid, InputNeuron activity,
                          InputNeuron brightness, NeuralNetworkRoot nn, HiddenNeuron[] hiddenNeurons) {
            this.outputLayer = outputLayer;
            this.sigmoid = sigmoid;
            this.activity = activity;
            this.brightness = brightness;
            this.nn = nn;
            this.hiddenNeurons = hiddenNeurons;
        }
    }

    /**
     * Builds the part shared by both demo networks: two input neurons (activity,
     * brightness), ten hidden neurons each fed by both inputs with weight 1/2,
     * and an output layer that is still empty.
     */
    private static PreparationResult prepareNetwork() {
        Root root = createModel();
        Item activityItem = root.getSmartHomeEntityModel().resolveItem("activity").orElseThrow(
                () -> new RuntimeException("Activity not found"));
        Item brightnessItem = root.getSmartHomeEntityModel().resolveItem("brightness").orElseThrow(
                () -> new RuntimeException("Brightness not found"));
        NeuralNetworkRoot nn = new NeuralNetworkRoot();
        // NOTE(review): despite its name this is Math.signum, not a logistic sigmoid.
        DoubleArrayDoubleFunction sigmoid = inputs -> Math.signum(Arrays.stream(inputs).sum());
        // input layer (2 neurons)
        InputNeuron activity = new InputNeuron();
        activity.setItem(activityItem);
        InputNeuron brightness = new InputNeuron();
        brightness.setItem(brightnessItem);
        nn.addInputNeuron(activity);
        nn.addInputNeuron(brightness);
        OutputLayer outputLayer = new OutputLayer();
        nn.setOutputLayer(outputLayer);
        // hidden layer (10 neurons)
        HiddenNeuron[] hiddenNeurons = new HiddenNeuron[10];
        for (int hiddenIndex = 0; hiddenIndex < hiddenNeurons.length; hiddenIndex++) {
            HiddenNeuron hiddenNeuron = new HiddenNeuron();
            hiddenNeuron.setActivationFormula(sigmoid);
            nn.addHiddenNeuron(hiddenNeuron);
            activity.connectTo(hiddenNeuron, 1.0 / 2.0);
            brightness.connectTo(hiddenNeuron, 1.0 / 2.0);
        }
        root.getMachineLearningRoot().setPreferenceLearning(nn);
        return new PreparationResult(outputLayer, sigmoid, activity, brightness, nn, hiddenNeurons);
    }

    /**
     * Purpose: Create a neural network with 3 layers (2 + 10 + 1 neurons).
     * Sigmoid function for all layers; the combinator of the output layer is the
     * identity function.
     */
    private static void createAndTestBrightnessNetwork() {
        /*
         * Brightness NN (translated from the original German notes):
         * - currently works with zones, not with individual lamps
         * - 3 layers
         * - input layer has 2 neurons (activity number, brightness-sensor value)
         * - hidden layer has 10 neurons
         * - output layer has 1 neuron (brightness value)
         * - activation function: sigmoid, the same for all layers
         */
        PreparationResult pr = prepareNetwork();
        OutputNeuron output = new OutputNeuron();
        output.setLabel("Brightness_Output");
        output.setActivationFormula(pr.sigmoid);
        pr.outputLayer.addOutputNeuron(output);
        // we just have one output neuron, thus use the identity function as combinator
        pr.outputLayer.setCombinator(inputs -> inputs[0]);
        for (HiddenNeuron hiddenNeuron : pr.hiddenNeurons) {
            hiddenNeuron.connectTo(output, 1.0 / pr.hiddenNeurons.length);
        }
        classifyTimed(pr.nn, NeuralNetworkRoot::internalClassify,
                classification -> Double.toString(classification.number));
    }

    /**
     * Purpose: Create a neural network with 3 layers (2 + 10 + 3 neurons).
     * Sigmoid function for all layers; the combinator packs the three outputs into
     * an RGB value ({@code R*65536 + G*256 + B}).
     */
    private static void createAndTestColorNetwork() {
        PreparationResult pr = prepareNetwork();
        for (int i = 0; i < 3; i++) {
            OutputNeuron output = new OutputNeuron();
            output.setLabel("Brightness_Output_" + i);
            output.setActivationFormula(inputs -> Arrays.stream(inputs).sum());
            pr.outputLayer.addOutputNeuron(output);
        }
        // Combine the three output neurons into one double representing RGB.
        // FIX: the blue channel previously reused inputs[0]; it must read inputs[2].
        pr.outputLayer.setCombinator(inputs ->
                65536 * Math.ceil(255.0 * inputs[0])
                        + 256 * Math.ceil(255.0 * inputs[1])
                        + Math.ceil(255.0 * inputs[2]));
        for (HiddenNeuron hiddenNeuron : pr.hiddenNeurons) {
            for (int outputIndex = 0; outputIndex < pr.outputLayer.getNumOutputNeuron(); outputIndex++) {
                hiddenNeuron.connectTo(pr.outputLayer.getOutputNeuron(outputIndex),
                        random.nextDouble() * 1.0 / pr.hiddenNeurons.length);
            }
        }
        classifyTimed(pr.nn, NeuralNetworkRoot::internalClassify,
                classification -> Double.toHexString(classification.number));
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/eraser.log"
filePattern="logs/eraser-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="info">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
/build/
/bin/
logs/
// Gradle build for the ml_test_boqi demo module.
repositories {
mavenCentral()
}
// Compiled for Java 8.
sourceCompatibility = 1.8
apply plugin: 'java'
apply plugin: 'application'
dependencies {
// The JastAdd smart-home model this demo builds on.
compile project(':eraser-base')
// JSON (de)serialization; version pinned in the root project's gradle.properties.
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: "${jackson_version}"
}
run {
mainClassName = 'de.tudresden.inf.st.eraser.ml_test_boqi.Main'
standardInput = System.in
// Pass CLI arguments via: ./gradlew run -PappArgs="['arg1','arg2']"
if (project.hasProperty("appArgs")) {
args Eval.me(appArgs)
}
}
sourceSets {
main {
java {
srcDir 'src/main/java'
}
}
}
package de.tudresden.inf.st.eraser.ml_test_boqi;
import de.tudresden.inf.st.eraser.jastadd.model.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.math3.stat.StatUtils;
import de.tudresden.inf.st.eraser.jastadd.model.Item;
/**
 * Demo that reconstructs a network trained externally with Encog (5 input, 8 hidden,
 * 4 output neurons, tanh activations) inside the JastAdd model and classifies one
 * hard-coded, pre-normalized date/time sample.
 */
public class Main {

    private static final Logger logger = LogManager.getLogger(Main.class);

    /** Single run; also used by the median computation in classifyTimed. */
    private static final int REPETITIONS = 1;

    public static void main(String[] args) {
        logger.info("Hello World!");
        createAndTestBrightnessNetwork();
    }

    /**
     * Builds the model with five number items holding one pre-normalized input
     * sample: month, day, hour, minute, and a constant bias of 1.
     */
    private static Root createModel() {
        Root model = Root.createEmptyRoot();
        Group group = new Group();
        group.setID("Group1");
        model.addGroup(group);
        // Input item states must already be normalized (the bias item is constant 1).
        // NOTE(review): the original comment listed different sample values
        // (1.0, 0.0667, 0.4545, -0.5593) than the states set below — verify against the learner.
        NumberItem monthItem = new NumberItem();
        monthItem.setState(-1.0);
        monthItem.setID("month");
        monthItem.setLabel("datetime-month");
        NumberItem dayItem = new NumberItem();
        dayItem.setState(0.2666666666666666);
        dayItem.setID("day");
        dayItem.setLabel("datetime-day");
        NumberItem hourItem = new NumberItem();
        hourItem.setState(-0.6363636363636364);
        hourItem.setID("hour");
        hourItem.setLabel("datetime-hour");
        NumberItem minuteItem = new NumberItem();
        minuteItem.setState(-0.5593220338983051);
        minuteItem.setID("minute");
        minuteItem.setLabel("datetime-minute");
        NumberItem biasItem = new NumberItem();
        biasItem.setState(1);
        biasItem.setID("bias");
        biasItem.setLabel("bias");
        group.addItem(monthItem);
        group.addItem(dayItem);
        group.addItem(hourItem);
        group.addItem(minuteItem);
        group.addItem(biasItem);
        return model;
    }

    /**
     * Runs {@code classify} once on the network and logs the result plus
     * wall-clock timing statistics.
     */
    private static void classifyTimed(
            NeuralNetworkRoot nn,
            Function<NeuralNetworkRoot, DoubleNumber> classify,
            Function<DoubleNumber, String> leafToString) {
        List<String> results = new ArrayList<>();
        List<Long> times = new ArrayList<>();
        long before = System.nanoTime();
        DoubleNumber classification = classify.apply(nn);
        long diff = System.nanoTime() - before;
        results.add(leafToString.apply(classification));
        times.add(TimeUnit.NANOSECONDS.toMillis(diff));
        logger.info("Classification results: {}", results);
        logger.info("Took {}ms", String.join("ms, ", times.stream().map(l -> Long.toString(l)).collect(Collectors.toList())));
        logger.info("Took on average: {}ms",
                Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).average().orElse(-1));
        logger.info("Took on median: {}ms",
                Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).sorted()
                        .skip((REPETITIONS - 1) / 2).limit(2 - REPETITIONS % 2).average().orElse(Double.NaN));
    }

    /**
     * Purpose: Create a neural network with 3 layers (5 + 8 + 4 neurons) wired
     * with the weights exported from the Encog learner, then classify once.
     */
    private static void createAndTestBrightnessNetwork() {
        Root model = createModel();
        Item monthItem = model.resolveItem("month").orElseThrow(
                () -> new RuntimeException("Month not found"));
        Item dayItem = model.resolveItem("day").orElseThrow(
                () -> new RuntimeException("Day not found"));
        Item hourItem = model.resolveItem("hour").orElseThrow(
                () -> new RuntimeException("Hour not found"));
        Item minuteItem = model.resolveItem("minute").orElseThrow(
                () -> new RuntimeException("Minute not found"));
        Item biasItem = model.resolveItem("bias").orElseThrow(
                () -> new RuntimeException("Bias not found"));
        NeuralNetworkRoot nn = new NeuralNetworkRoot();
        DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
        // Constant-1 activation used by the bias hidden neuron.
        DoubleArrayDoubleFunction function_one = inputs -> function_one();
        // Weights exported from the learner module. Layout (67 entries):
        //   [0..31]  hidden->output weights, 8 per output neuron,
        //   [32..66] input->hidden weights, 5 per non-bias hidden neuron.
        ArrayList<Double> weights = new ArrayList<Double>(Arrays.asList(
                -4.8288886204, 0.6723236931, 2.1451097188, -0.8551053267, -0.7858304445, 4.1369566727, -3.3096691918,
                -0.2190980261, 2.6871317298, 1.2272772167, -2.5292510941, -1.2860407542, -4.2280191541, 1.004752063,
                0.8345207039, 0.0123185817, -0.5921808915, 0.0967336988, -0.305892589, 0.5572392781, -0.7190098073,
                -1.6247354373, 0.4589248822, -0.0269816271, 2.2208040852, -3.6281085698, 0.2204999381, 4.7263701556,
                -4.8348948698, 0.231141867, 8.7120706018, -1.4912707741, 0.9482851705, 0.1377551973, -6.6525856465,
                -1.321197315, -2.7369948929, 17.664289214, -3.1279212743, -0.8245974167, -1.4251924355, 0.8370511414,
                2.0841638143, -0.210152817, -1.9414132298, -1.7973688846, -2.1977997794, -3.6046836685, -3.3403186721,
                -6.1556924635, -2.8952903587, -1.0773989561, 0.2300429028, -0.2184650371, 0.0297181797, 0.5709092417,
                1.3960358442, -3.1577981239, 0.0423944625, -17.8143314027, -1.4439317172, -0.5137688896, 1.0166045804,
                0.3059149818, 1.0938282764, 0.6203368549, 0.702449827));
        // input layer
        InputNeuron month = new InputNeuron();
        month.setItem(monthItem);
        InputNeuron day = new InputNeuron();
        day.setItem(dayItem);
        InputNeuron hour = new InputNeuron();
        hour.setItem(hourItem);
        InputNeuron minute = new InputNeuron();
        minute.setItem(minuteItem);
        InputNeuron bias = new InputNeuron();
        bias.setItem(biasItem);
        nn.addInputNeuron(month);
        nn.addInputNeuron(day);
        nn.addInputNeuron(hour);
        nn.addInputNeuron(minute);
        nn.addInputNeuron(bias);
        // output layer: four tanh neurons, combined by the argmax-based predictor
        OutputLayer outputLayer = new OutputLayer();
        OutputNeuron output0 = new OutputNeuron();
        output0.setActivationFormula(tanh);
        OutputNeuron output1 = new OutputNeuron();
        output1.setActivationFormula(tanh);
        OutputNeuron output2 = new OutputNeuron();
        output2.setActivationFormula(tanh);
        OutputNeuron output3 = new OutputNeuron();
        output3.setActivationFormula(tanh);
        outputLayer.addOutputNeuron(output0);
        outputLayer.addOutputNeuron(output1);
        outputLayer.addOutputNeuron(output2);
        outputLayer.addOutputNeuron(output3);
        outputLayer.setCombinator(inputs -> predictor(inputs));
        nn.setOutputLayer(outputLayer);
        // hidden layer: 7 tanh neurons fed by all inputs, plus one bias neuron (index 7)
        HiddenNeuron[] hiddenNeurons = new HiddenNeuron[8];
        for (int i = 0; i < (hiddenNeurons.length); i++) {
            if (i == 7) {
                // Bias hidden neuron: constant activation 1, fed only by the bias input.
                HiddenNeuron hiddenNeuron = new HiddenNeuron();
                hiddenNeuron.setActivationFormula(function_one);
                hiddenNeurons[i] = hiddenNeuron;
                nn.addHiddenNeuron(hiddenNeuron);
                bias.connectTo(hiddenNeuron, 1.0);
                hiddenNeuron.connectTo(output0, weights.get(i));
                hiddenNeuron.connectTo(output1, weights.get(i + 8));
                hiddenNeuron.connectTo(output2, weights.get(i + 8 * 2));
                hiddenNeuron.connectTo(output3, weights.get(i + 8 * 3));
            } else {
                HiddenNeuron hiddenNeuron = new HiddenNeuron();
                hiddenNeuron.setActivationFormula(tanh);
                hiddenNeurons[i] = hiddenNeuron;
                nn.addHiddenNeuron(hiddenNeuron);
                // Input->hidden weights start after the 8*4 hidden->output entries,
                // five per hidden neuron (month, day, hour, minute, bias).
                month.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4) + i * 5));
                day.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 1) + i * 5));
                hour.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 2) + i * 5));
                minute.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 3) + i * 5));
                bias.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 4) + i * 5));
                hiddenNeuron.connectTo(output0, weights.get(i));
                hiddenNeuron.connectTo(output1, weights.get(i + 8));
                hiddenNeuron.connectTo(output2, weights.get(i + 8 * 2));
                hiddenNeuron.connectTo(output3, weights.get(i + 8 * 3));
            }
        }
        model.getMachineLearningRoot().setPreferenceLearning(nn);
        System.out.println(model.prettyPrint());
        classifyTimed(nn, NeuralNetworkRoot::classify,
                classification -> Double.toString(classification.number));
    }

    /** Constant activation used by the bias hidden neuron. */
    private static double function_one() {
        return 1.0;
    }

    /**
     * Output-layer combinator: picks the index of the maximal activation (last
     * occurrence on ties, as before) and maps it through the label table
     * produced by the learner.
     */
    private static double predictor(double[] inputs) {
        int index = 0;
        double maxinput = StatUtils.max(inputs);
        // FIX: print the array contents instead of its identity hash code.
        System.out.println(Arrays.toString(inputs));
        for (int i = 0; i < inputs.length; i++) {
            if (inputs[i] == maxinput) {
                index = i; // keeps the LAST index holding the maximum (original behavior)
            }
        }
        // Class labels from the learner, ordered by output-neuron index.
        ArrayList<Double> outputs = new ArrayList<Double>(Arrays.asList(2.0, 1.0, 3.0, 0.0));
        double output = outputs.get(index);
        return output;
    }
}
//inputs:
//[BasicMLData:-1.0,0.2666666666666666,-0.6363636363636364,-0.5593220338983051]
//outputs:
//[BasicMLData:-0.9151867668336432,-0.1568555041251098,-0.9786996639280675,-0.9436628188408074]
//[7, 20, 12, 13] -> predicted: 1(correct: 2)
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
filePattern="logs/jastadd-mquat-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
build/
/bin/
logs/
Manifest-Version: 1.0
Private-Package: org.openhab.action.machinelearn.internal
Ignore-Package: org.openhab.action.machinelearn.internal
Bundle-License: http://www.eclipse.org/legal/epl-v10.html
Bundle-Name: openHAB Machine Learning Action
Bundle-SymbolicName: org.openhab.action.machinelearn
Bundle-Vendor: openHAB.org
Bundle-Version: 1.11.0.qualifier
Bundle-Activator: org.openhab.action.machinelearn.internal.MachineLearnActivator
Bundle-ManifestVersion: 2
Bundle-Description: This is the Machine Learning action of the open Home Aut
omation Bus (openHAB)
Import-Package: org.openhab.core.scriptengine.action,
org.osgi.framework,
org.osgi.service.cm,
org.osgi.service.component,
org.slf4j
Bundle-DocURL: http://www.openhab.org
Bundle-RequiredExecutionEnvironment: JavaSE-1.7
Service-Component: OSGI-INF/action.xml
Bundle-ClassPath: .,
lib/weka.jar
Bundle-ActivationPolicy: lazy
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (c) 2010-2016 by the respective copyright holders.
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
http://www.eclipse.org/legal/epl-v10.html
-->
<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="activate" deactivate="deactivate" immediate="true" name="org.openhab.action.machinelearn.action">
<implementation class="org.openhab.action.machinelearn.internal.MachineLearnActionService" />
<service>
<provide interface="org.openhab.core.scriptengine.action.ActionService" />
<provide interface="org.osgi.service.cm.ManagedService" />
</service>
<property name="service.pid" type="String" value="org.openhab.machinelearn" />
</scr:component>
# OpenHAB machine learning action plugin
## Installation
Copy a .jar file containing the plugin into the OpenHAB's `addons` directory.
The action will then provide several methods to use ML functionality.
## How does this work?
The plugin has a collection of machine learning models, of which only one is
active at a time. New models will be created if incoming _labeled_ instances
have a set of attributes different from what the current models have, but these
models will be inactive, until a user-triggered re-evaluation is performed. The
model with the best evaluation score will be made active. Incoming _labeled_
instances also form a _training window_, which can be used for retraining the
existing models (which can be useful to fight concept drift).
## A minimal example
```java
// Read the docs about ARFF in the internet
// Internally, this creates one model trained on the dataset
buildFromDataset('/path/to/dataset.arff')
// Assemble an instance that has to be predicted, somehow...
Map<String, Double> instance = assembleInstance()
// Get the prediction (class index if dataset was for classification
// or the regression value if dataset was for regression).
double result = classify(instance)
// If anotherInstance is labeled, it will be added to the training window
// of the created model, but the model will not be retrained.
result = classify(labeledInstance)
// Retrain all models (only one, currently) on their training windows.
retrain()
// If such instance is labeled, a new model with an appropriate featureset
// will be silently created, and trained on the instance. The returned
// prediction, however, will be made by a currently active model (the one that
// was built from dataset) by ignoring new features and setting the values of
// missing features to some value (zero or mean).
result = classify(instanceWithMissingFeature_orNewFeature)
// Both models now make predictions, but only the result of a currently active
// model is returned
result = classify(someUnlabeledInstance)
// The recent performance of all models is compared, and the one with the best
// score is made active.
evaluate()
```
## Available public methods
* `void saveModel(String path); void loadModel(String path)`
Save the set of predictive models that resulted as a work of this plugin or load
the previously saved model set.
* `void buildFromDataset(String path)`
If `path` contains an ARFF formatted dataset, where the attribute to be predicted
is named `label`, then the plugin will erase everything and create the set with
one model, built from that dataset, and set that model as active.
* `double classify(Map<String, Double> row)`
Pass a dictionary with an input instance (names of attributes mapped to their
values in the instance). All models will make predictions for this instance, but
only the prediction made by the currently active model will be returned. If none
of the models is active, then an exception indicating that issue will be thrown.
If an input instance has a set of attributes, that none of the trained models
has, then the existing models will still try to predict that instance. If the
instance lacks some attributes, that are present in the models, then the
models will replace the resulting missing value with zero. If the instance has
attributes that are not present in models, these will be ignored by models.
If such instance has a label, though, then a new model will be created and
trained on this single instance. The new model will be inactive until a user
demands re-evaluation. For such models, every incoming labeled instance will be
added to their training window, and the model will be retrained every time the
window grows, until the training window is filled. After that retraining happens
only on user's demand.
Labeled instances will be added to the head of the training window of all the
models, removing the instance on the tail if the window is full.
* `void evaluate()`
The models will store their recent prediction history of labeled instances, thus
they are aware of their recent performance. Calling `evaluate` will set the
model with the best recent history as an active one.
* `void retrain()`
Retrain all existing models on their training windows. Reset the recent history
of all models.
* `void reset()`
Erase the current model set.
// Gradle build for the openHAB machine-learning action bundle.
dependencies {
// Weka is bundled inside the plugin jar (see Bundle-ClassPath in MANIFEST.MF).
compile files('lib/weka.jar')
// Stub of the openHAB action scripting API.
compile project(':stub.org.openhab.core.scriptengine.action')
// OSGi framework and service APIs used by the activator/component.
compile group: 'org.osgi', name: 'org.osgi.framework', version: '1.9.0'
compile group: 'org.osgi', name: 'org.osgi.service.cm', version: '1.6.0'
compile group: 'org.osgi', name: 'org.osgi.service.component', version: '1.4.0'
compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'
}
sourceSets {
main {
java {
srcDir 'src/main/java'
}
}
}
File deleted
package org.openhab.action.machinelearn.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import weka.classifiers.Classifier;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
//Machine Learning Model class, stores classifier and data on which it was trained
// One predictive model: a Weka classifier, the sliding window of instances it was
// trained on, a selection weight (1.0 = active model), and an exponentially
// smoothed history of its recent prediction error (lower is better).
class MLUnit implements java.io.Serializable {
private static final long serialVersionUID = -4328733681884286806L;
// Smoothing factor of the exponential moving average over recent prediction error.
private static final double alpha = 0.1;
private Classifier cls;
private Instances data;
// How much data-history to store
private int capacity;
// How important is this classifier
private double weight;
// EMA of recent error; starts pessimistic at 1.0 so fresh models must earn trust.
private double history = 1.0;
// Builds an immediately-trained, active (weight 1.0) model from an ARFF dataset;
// the window capacity equals the dataset size. The class attribute must be "label".
protected MLUnit(String dataset, Class<? extends Classifier> clsc) throws Exception {
data = DataSource.read(dataset);
capacity = data.numInstances();
data.setClass(data.attribute("label"));
weight = 1.0;
cls = clsc.newInstance();
build();
}
// Builds an empty, inactive (weight 0.0) model for the given attribute set with a
// fixed window capacity; it is trained later as labeled instances arrive.
protected MLUnit(Set<String> attNames, Class<? extends Classifier> clsc, int window) throws Exception {
// Initialize data entity (see Weka documentation)
ArrayList<Attribute> atts = new ArrayList<Attribute>();
int classIndex = -1;
for (String name : attNames) {
if (name.equals("label")) {
classIndex = atts.size();
}
atts.add(new Attribute(name));
}
// Throw exception if no "label" field found
if (classIndex == -1) {
throw new Exception(attNames.toString());
}
this.data = new Instances("thinkaboutit", atts, 0);
this.data.setClassIndex(classIndex);
// Create classifier
this.cls = clsc.newInstance();
this.capacity = window;
this.weight = 0.0;
}
private void build() throws Exception {
// When there is data - retrain (or train)
cls.buildClassifier(data);
}
protected Set<String> getAttributeSet() {
// Return set of attributes this model contains
// ("label" is appended because enumerateAttributes() skips the class attribute).
Set<String> tmp = new HashSet<String>();
for (Attribute a : Collections.list(data.enumerateAttributes())) {
tmp.add(a.name());
}
tmp.add("label");
return tmp;
}
protected void setWeight(double w) {
this.weight = w;
}
protected double getWeight() {
return this.weight;
}
protected double getHistory() {
return this.history;
}
protected boolean isMatch(Set<String> keys) {
// Check if you can use this model for incoming data
return keys.equals(getAttributeSet());
}
// Classifies one instance; if it carries a "label", also folds it into the training
// window and updates the error history. Missing attributes default to 0.
protected double incoming(Map<String, Double> row) throws Exception {
// Fill in new data row
double[] values = new double[data.numAttributes()];
for (int i = 0; i < values.length; i++) {
Double num = row.get(data.attribute(i).name());
values[i] = num == null ? 0 : num.doubleValue();
}
Instance inst = new DenseInstance(1.0, values);
inst.setDataset(data);
double clsResult = cls.classifyInstance(inst);
// If data was labeled add it to the model
if (row.get("label") != null) {
data.add(inst);
double[] distro = cls.distributionForInstance(inst);
// Single-element distribution => regression: EMA of squared error;
// otherwise classification: EMA of 0/1 loss.
if (distro.length == 1) {
history = (1 - alpha) * history + alpha * Math.pow(clsResult - row.get("label"), 2);
} else {
history = (1 - alpha) * history + alpha * (clsResult == row.get("label") ? 0.0 : 1.0);
}
if (data.numInstances() < capacity) {
// Retrain on every instance until capacity is reached
// after that only on demand (e.g. sensor malfunction, concept
// drift)
build();
} else {
// Window full: drop the oldest instance instead of retraining.
data.delete(0);
}
}
// Classify and return result
return clsResult;
}
}
/**
* Copyright (c) 2010-2016 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.action.machinelearn.internal;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openhab.core.scriptengine.action.ActionDoc;
import org.openhab.core.scriptengine.action.ParamDoc;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.classifiers.trees.J48;
/**
* This class provides static methods that can be used to trigger
* machine learning algorithms
*
* @author Pavel Lutskov
* @since 1.9.0
*/
public class MachineLearn {
// This is the model (list of classifiers, usually one or two)
private static List<MLUnit> clfs = new ArrayList<MLUnit>();
// Debug string for stack traces
private static String dbg = "";
// Debug string for program path examination
private static String dbg2 = "";
// Method to trigger calculation.
// Classifies one instance: creates a new (inactive) model if no existing model
// matches the instance's attribute set, then returns the weight-normalized sum of
// all model predictions (only the active model has weight 1.0, so effectively its
// prediction). Throws if no model is active.
@ActionDoc(text = "Run regression on given data", returns = "regression result")
public static synchronized double classify(
        @ParamDoc(name = "row", text = "instance for classification") Map<String, Double> row) throws Exception {
    // If there is no model for this data-point, then create a new one
    if (!existsMatch(row.keySet())) {
        dbg2 += ":no match found:";
        try {
            dbg2 += ":try to create new unit:";
            MLUnit cls = new MLUnit(row.keySet(), MultilayerPerceptron.class, 500);
            clfs.add(cls);
        } catch (Exception e) {
            dbg2 += ":fail to create new unit:";
            handle(e);
        }
    }
    List<Double> results = new ArrayList<Double>();
    double fullWeight = 0.0;
    // Try classification on existing models (might be extended to ensemble learning)
    for (MLUnit cls : clfs) {
        try {
            dbg2 += ":try classification:";
            results.add(cls.incoming(row) * cls.getWeight());
            fullWeight += cls.getWeight();
        } catch (Exception e) {
            dbg2 += ":failed classification:";
            results.add(0.0);
            handle(e);
        }
    }
    // No usable model (all weights zero, e.g. before the first evaluate()).
    if (fullWeight == 0.0) {
        throw new Exception("Classification of the instance is impossible.");
    }
    // Result is weighted sum of outputs from all models in list (in this version
    // the active model has weight 1, and others 0)
    dbg2 += ":end results:";
    double result = 0.0;
    for (double d : results) {
        result += d;
    }
    // FIX: fullWeight > 0 is guaranteed by the throw above; the former
    // "fullWeight == 0.0 ? 0.0 : ..." ternary was dead code and has been removed.
    return result / fullWeight;
}
// Run evaluation to activate/deactivate present models
@ActionDoc(text = "Evaluate model on labeled data and see which model is the best")
public static synchronized void evaluate() {
double best = Double.POSITIVE_INFINITY;
MLUnit winner = null;
for (MLUnit cls : clfs) {
if (cls.getHistory() < best) {
best = cls.getHistory();
winner = cls;
}
}
if (winner != null) {
for (MLUnit cls : clfs) {
cls.setWeight(0.0);
}
winner.setWeight(1.0);
}
}
// Understand if there exists an appropriate model for incoming data-point
@ActionDoc(text = "Private method to realize that new model is necessary")
private static boolean existsMatch(Set<String> keys) {
boolean does = false;
for (MLUnit cls : clfs) {
dbg2 += ":checking unit for match::";
dbg2 += cls.getAttributeSet();
dbg2 += ":" + keys;
dbg2 += ":end checking unit:";
does = cls.isMatch(keys) || does;
}
return does;
}
// Store latest stack trace into dbg string
@ActionDoc(text = "Private method to handle occuring exceptions")
private static void handle(Exception e) {
e.printStackTrace();
StringWriter sw = new StringWriter();
e.printStackTrace(new PrintWriter(sw));
dbg = sw.toString();
}
// Method to retrieve debug info from rules
@ActionDoc(text = "Get some debug info", returns = "Debug string with the latest stack trace")
public static String getDebug() {
String tmp = dbg;
dbg = "";
return tmp;
}
@ActionDoc(text = "Get more debug info", returns = "More debug info")
public static String getDebu2() {
String tmp = dbg2;
dbg2 = "";
return tmp;
}
@ActionDoc(text = "Reset if something went wrong")
public static void reset() {
clfs = new ArrayList<MLUnit>();
dbg = "";
dbg2 = "";
}
@ActionDoc(text = "Build model from dataset")
public static void buildFromDataset(String path) {
clfs = new ArrayList<>();
try {
clfs.add(new MLUnit(path, J48.class));
} catch (Exception e) {
dbg2 += ":failed loading:";
handle(e);
}
}
@ActionDoc(text = "Save model to disk")
public static void saveModel(String path) {
try (FileOutputStream fos = new FileOutputStream(path); ObjectOutputStream oos = new ObjectOutputStream(fos)) {
dbg2 += ":saving model to " + path + ":";
oos.writeObject(clfs);
} catch (Exception e) {
dbg2 += ":failed saving:";
handle(e);
}
}
@ActionDoc(text = "Load previously stored model")
public static synchronized void loadModel(String path) {
try (FileInputStream fis = new FileInputStream(path); ObjectInputStream ois = new ObjectInputStream(fis)) {
dbg2 += ":loading model from " + path + ":";
clfs = (ArrayList<MLUnit>) ois.readObject();
} catch (Exception e) {
dbg2 += ":failed loading:";
handle(e);
}
}
}
/**
* Copyright (c) 2010-2016 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.action.machinelearn.internal;
import java.util.Dictionary;
import org.openhab.core.scriptengine.action.ActionService;
import org.osgi.service.cm.ConfigurationException;
import org.osgi.service.cm.ManagedService;
/**
 * This class registers an OSGi service for the MachineLearn action.
 *
 * @author Kai Kreuzer
 * @since 1.3.0
 */
public class MachineLearnActionService implements ActionService, ManagedService {

    /**
     * Indicates whether this action is properly configured which means all
     * necessary configurations are set. This flag can be checked by the
     * action methods before executing code.
     */
    /* default */ static boolean isProperlyConfigured = false;

    public MachineLearnActionService() {
        // nothing to initialize
    }

    public void activate() {
        // no resources need to be allocated on activation
    }

    public void deactivate() {
        // deallocate resources here that are no longer needed and
        // should be reset when activating this binding again
    }

    @Override
    public String getActionClassName() {
        return getActionClass().getCanonicalName();
    }

    @Override
    public Class<?> getActionClass() {
        return MachineLearn.class;
    }

    @Override
    @SuppressWarnings("rawtypes")
    public void updated(Dictionary config) throws ConfigurationException {
        // no configuration is currently evaluated
    }
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment