Commit 32c522e8 authored by boqiren

implement interfaces

parent d644869f
Showing 2343 additions and 0 deletions
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/eraser.log"
filePattern="logs/eraser-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
/build/
repositories {
mavenCentral()
}
sourceCompatibility = 1.8
apply plugin: 'java'
apply plugin: 'application'
dependencies {
compile project(':eraser-base')
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.8.8.1'
compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.10.0'
compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.10.0'
testCompile group: 'junit', name: 'junit', version: '4.12'
testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '1.0.0.0'
compile 'org.encog:encog-core:3.4'
}
run {
mainClassName = 'de.tudresden.inf.st.eraser.learner_test.Main'
standardInput = System.in
if (project.hasProperty("appArgs")) {
args Eval.me(appArgs)
}
}
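// Note (assumption, not part of this commit): program arguments can be passed as a Groovy
// list through the appArgs project property, e.g. gradle run -PappArgs="['arg1','arg2']".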
sourceSets {
main {
java {
srcDir 'src/main/java'
}
}
}
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
filePattern="logs/jastadd-mquat-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
package de.tudresden.inf.st.eraser.learner_test;
import org.encog.Encog;
import org.encog.ml.MLClassification;
import org.encog.ml.data.MLData;
import org.encog.persist.EncogDirectoryPersistence;
import org.encog.util.csv.CSVFormat;
import org.encog.util.csv.ReadCSV;
import org.encog.util.simple.EncogUtility;
import org.encog.ml.data.versatile.NormalizationHelper;
import org.encog.ml.data.versatile.VersatileMLDataSet;
import org.encog.ml.data.versatile.columns.ColumnDefinition;
import org.encog.ml.data.versatile.columns.ColumnType;
import org.encog.ml.data.versatile.sources.VersatileDataSource;
import org.encog.ml.data.versatile.sources.CSVDataSource;
import org.encog.ml.factory.MLMethodFactory;
import org.encog.ml.model.EncogModel;
import org.encog.ConsoleStatusReportable;
import org.encog.ml.MLRegression;
import java.io.File;
import java.util.Arrays;
import static org.encog.persist.EncogDirectoryPersistence.*;
public class Main {
public static void main(String[] args) {
// Map the CSV data into an Encog versatile data set.
String saveFile = "src/main/java/de/tudresden/inf/st/eraser/learner_test/save_model.eg";
String dataFile = "src/main/java/de/tudresden/inf/st/eraser/learner_test/preference_data.csv";
File file = new File(dataFile);
VersatileDataSource source = new CSVDataSource(file, false, CSVFormat.DECIMAL_POINT);
VersatileMLDataSet data = new VersatileMLDataSet(source);
data.defineSourceColumn("monat", 0, ColumnType.continuous);
data.defineSourceColumn("day", 1, ColumnType.continuous);
data.defineSourceColumn("hour", 2, ColumnType.continuous);
data.defineSourceColumn("minute", 3, ColumnType.continuous);
ColumnDefinition outputColumn = data.defineSourceColumn("labels", 4, ColumnType.continuous);
data.defineSingleOutputOthersInput(outputColumn);
data.analyze();
System.out.println("data loaded");
EncogModel model = new EncogModel(data);
model.selectMethod(data, MLMethodFactory.TYPE_FEEDFORWARD);
//model.setReport(new ConsoleStatusReportable());
data.normalize();
NormalizationHelper helper = data.getNormHelper();
System.out.println(helper.toString());
model.holdBackValidation(0.3, true, 1001);
model.selectTrainingType(data);
// crossvalidate trains a model each time it is called: once cast as regression, once as classification.
MLRegression bestMethod = (MLRegression) model.crossvalidate(5, true);
MLClassification bestMethodTest = (MLClassification) model.crossvalidate(5, true);
/*
System.out.println("Training error: " + EncogUtility.calculateRegressionError(bestMethod, model.getTrainingDataset()));
System.out.println("testTraining error: " + EncogUtility.calculateClassificationError(bestMethodTest, model.getTrainingDataset()));
System.out.println("Validation error: " + EncogUtility.calculateRegressionError(bestMethod, model.getValidationDataset()));
System.out.println("testValidation error: " + EncogUtility.calculateClassificationError(bestMethodTest, model.getValidationDataset()));
System.out.println(helper.getClass());
System.out.println(helper.toString());
System.out.println("Final model: " + bestMethod);
System.out.println("Final test model: " + bestMethodTest);
*/
// Split the normalization ranges out of the helper string (currently only printed, not used further).
String helperStr = helper.toString();
String[] split = helperStr.split(";");
String[] finalStr = split[split.length - 1].replace("]", "").replace("[", "").split(",");
System.out.println(helper);
// Save the trained classification network.
saveObject(new File(saveFile), bestMethodTest);
ReadCSV csv = new ReadCSV(dataFile, false, CSVFormat.DECIMAL_POINT);
String[] line = new String[4];
MLData input = helper.allocateInputVector();
System.out.println("input test---------------");
System.out.println(input);
while (csv.next()) {
    StringBuilder result = new StringBuilder();
    line[0] = csv.get(0);
    line[1] = csv.get(1);
    line[2] = csv.get(2);
    line[3] = csv.get(3);
    String correct = csv.get(4);
    helper.normalizeInputVector(line, input.getData(), false);
    MLData output = bestMethod.compute(input);
    System.out.println("inputs:");
    System.out.println(input);
    System.out.println("outputs:");
    System.out.println(output);
    String brightnessChosen = helper.denormalizeOutputVectorToString(output)[0];
    result.append(Arrays.toString(line));
    result.append(" -> predicted: ");
    result.append(brightnessChosen);
    result.append(" (correct: ");
    result.append(correct);
    result.append(")");
    System.out.println(result.toString());
    // Only the first CSV row is checked here.
    break;
}
// Optionally delete the data file, then shut down Encog.
//file.delete();
Encog.getInstance().shutdown();
/* Sample errors from one run:
   Training error: 0.299928703107046
   testTraining error: 0.9931740614334471
   Validation error: 0.41277024952020763
   testValidation error: 0.992 */
}
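/*
 * Hedged sketch, not part of the original commit: the network persisted with saveObject
 * above could be reloaded later for predictions. The method name loadSavedNetwork is an
 * assumption used only for illustration.
 */
private static MLRegression loadSavedNetwork(String saveFile) {
    // EncogDirectoryPersistence.loadObject returns the persisted MLMethod; the trained
    // feedforward network also implements MLRegression, so compute(MLData) works on it.
    return (MLRegression) EncogDirectoryPersistence.loadObject(new File(saveFile));
}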
}
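The training data below (preference_data.csv) has five comma-separated columns: month, day, hour, minute, and the chosen brightness label (0-3), matching the source columns defined above.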
7,20,12,13,2
7,20,14,40,1
7,20,14,40,2
7,21,13,2,2
7,21,13,2,2
7,21,14,23,2
7,21,14,23,2
7,21,15,41,2
7,21,16,54,2
7,21,16,54,2
7,21,17,45,3
7,22,12,28,3
7,22,15,35,2
7,22,15,35,2
7,22,18,59,3
7,22,18,59,3
7,23,12,32,2
7,23,12,32,2
7,23,16,7,2
7,23,16,7,2
7,23,16,7,2
7,23,16,7,2
7,23,16,7,2
7,24,12,4,0
7,24,12,4,0
7,24,12,4,1
7,24,14,38,2
7,24,14,38,2
7,24,18,54,3
7,25,12,31,0
7,25,12,32,1
7,25,12,32,1
7,25,15,6,3
7,25,18,56,3
7,26,13,41,2
7,26,19,14,3
7,27,11,39,2
7,27,11,39,3
7,27,11,46,3
7,27,11,46,2
7,27,13,8,2
7,27,13,8,2
7,27,13,9,2
7,27,13,45,2
7,27,13,45,2
7,27,15,38,3
7,28,12,12,2
7,28,12,13,2
7,28,12,41,2
7,28,12,41,2
7,28,12,41,2
7,28,14,0,1
7,28,14,0,2
7,28,15,21,3
7,28,18,56,3
7,29,10,9,1
7,29,10,9,1
7,29,10,9,1
7,29,11,54,0
7,29,11,54,0
7,29,11,54,0
7,29,11,54,1
7,29,14,10,2
7,29,16,44,2
7,29,16,44,2
7,30,16,7,3
7,30,18,45,3
7,31,13,2,0
7,31,13,2,1
7,31,13,3,1
7,31,13,3,1
7,31,13,3,1
7,31,18,39,3
8,1,12,22,0
8,1,12,22,1
8,1,14,20,2
8,1,14,20,2
8,1,14,20,2
8,1,15,55,3
8,1,18,31,3
8,1,18,37,3
8,1,18,37,3
8,1,19,2,3
8,1,19,2,3
8,1,20,5,3
8,2,10,9,2
8,2,10,9,1
8,2,10,9,2
8,2,10,9,2
8,2,13,58,2
8,2,13,58,2
8,2,15,44,3
8,2,15,44,3
8,2,15,44,3
8,2,17,21,3
8,2,17,21,3
8,2,17,21,3
8,3,13,31,1
8,3,13,31,2
8,3,13,32,2
8,3,16,43,3
8,4,13,20,1
8,4,13,20,2
8,4,18,27,3
8,5,13,37,2
8,5,13,37,2
8,5,18,33,3
8,6,11,24,3
8,6,11,24,3
8,6,11,24,3
8,6,13,50,3
8,7,13,4,2
8,7,13,4,2
8,7,14,56,3
8,8,12,13,2
8,8,12,13,2
8,8,15,51,2
8,8,15,51,2
8,8,15,51,3
8,9,13,32,2
8,9,13,32,2
8,9,13,32,2
8,9,15,8,2
8,9,15,8,2
8,9,15,8,2
8,9,16,19,2
8,10,11,32,0
8,10,11,32,1
8,10,11,32,1
8,10,13,13,1
8,10,13,13,1
8,10,13,13,2
8,10,16,42,3
8,10,16,42,3
8,11,14,6,2
8,11,14,7,2
8,11,18,54,3
8,11,18,54,3
8,11,18,54,3
8,12,12,27,1
8,12,12,27,1
8,12,12,28,1
8,12,13,53,2
8,12,13,53,2
8,12,13,53,2
8,12,15,21,3
8,13,13,16,1
8,13,13,16,1
8,13,13,16,1
8,13,14,14,2
8,13,14,14,2
8,13,16,11,3
8,13,17,18,3
8,14,13,7,1
8,14,13,7,1
8,14,13,7,1
8,14,13,7,1
8,14,13,7,2
8,14,13,7,2
8,14,15,6,3
8,15,14,5,2
8,15,14,5,2
8,15,14,6,2
8,15,14,6,2
8,15,16,41,3
8,15,16,41,3
8,15,17,30,3
8,16,13,40,2
8,16,13,40,2
8,16,17,52,3
8,16,17,53,3
8,17,13,34,1
8,17,13,35,2
8,17,14,7,2
8,17,19,2,3
8,18,10,21,3
8,18,11,14,2
8,18,11,14,2
8,18,11,14,2
8,18,11,14,2
8,18,14,25,2
8,18,14,25,3
8,18,14,25,2
8,18,18,18,3
8,18,18,19,3
8,19,18,33,3
8,19,18,33,3
8,19,18,33,3
8,19,18,33,3
8,20,14,28,2
8,20,14,28,2
8,20,14,28,2
8,20,14,28,2
8,20,17,8,3
8,20,18,22,3
8,21,11,24,1
8,21,11,24,1
8,21,11,24,1
8,21,15,34,3
8,21,18,55,3
8,22,12,3,1
8,22,12,4,2
8,22,12,4,2
8,22,13,51,2
8,22,13,51,2
8,22,13,51,2
8,22,18,12,3
8,22,18,12,3
8,22,18,12,3
8,22,18,12,3
8,22,18,40,3
8,22,18,40,3
8,23,13,42,1
8,23,13,42,1
8,23,17,32,3
8,23,19,28,3
8,23,20,27,3
8,23,20,27,3
8,23,21,49,3
8,24,14,0,2
8,24,14,0,2
8,24,14,0,2
8,24,14,0,2
8,24,15,4,3
8,24,15,4,3
8,24,16,2,3
8,24,16,3,3
8,24,16,37,3
8,24,17,9,3
8,24,17,14,3
8,25,13,34,1
8,25,13,34,1
8,25,13,34,1
8,25,13,34,1
8,25,13,34,1
8,25,15,1,3
8,25,17,58,3
8,26,10,29,0
8,26,10,29,0
8,26,10,29,0
8,26,10,29,0
8,26,10,29,0
8,26,16,42,3
8,26,16,42,3
8,26,18,41,3
8,26,18,41,3
8,27,13,41,2
8,27,13,41,2
8,27,13,41,2
8,27,13,41,2
8,27,17,42,3
8,28,11,9,1
8,28,11,9,1
8,28,12,14,0
8,28,12,14,1
8,28,12,14,0
8,28,15,3,2
8,28,15,3,2
8,28,16,31,3
8,28,17,40,3
8,29,14,44,3
8,29,17,25,3
8,30,12,5,0
8,30,12,5,0
8,30,12,5,0
8,30,13,32,1
8,30,13,32,1
8,30,13,56,2
8,30,14,23,2
8,30,14,23,2
8,30,14,23,2
8,30,14,23,2
8,30,14,41,2
8,30,14,41,2
8,30,14,41,2
8,30,15,50,3
8,30,17,0,3
8,30,18,59,3
8,30,18,59,3
8,31,14,31,2
8,31,14,31,2
8,31,14,31,2
8,31,17,59,3
8,31,18,0,3
9,1,16,13,3
9,1,16,13,3
9,1,16,13,3
9,1,17,41,3
9,2,13,44,1
9,2,13,44,1
9,2,13,44,1
9,2,14,49,2
9,2,14,49,2
9,2,14,49,2
9,2,16,6,3
9,2,16,6,3
9,2,17,2,3
9,3,16,9,3
9,3,17,35,3
9,3,17,36,3
9,4,12,57,1
9,4,12,57,1
9,4,15,8,3
9,4,15,34,3
9,4,16,26,3
9,4,16,26,3
9,4,18,37,3
9,4,18,37,3
9,4,18,37,3
9,6,11,18,0
9,6,11,18,0
9,6,12,54,1
9,6,12,54,1
9,6,14,21,2
9,6,14,21,2
9,6,19,20,3
9,7,11,50,0
9,7,14,17,2
9,7,14,57,3
9,7,14,57,3
9,7,16,56,3
9,7,16,56,3
9,7,16,56,3
9,7,16,56,3
9,7,18,38,3
9,7,18,38,3
9,8,11,4,2
9,8,11,4,2
9,8,11,13,0
9,8,11,13,0
9,8,11,13,0
9,8,11,13,0
9,8,11,13,0
9,8,11,14,0
9,8,11,14,1
9,8,11,14,1
9,8,12,1,0
9,8,12,1,0
9,8,12,1,0
9,8,12,1,0
9,8,12,1,0
9,8,12,1,1
9,8,12,36,0
9,8,12,36,0
9,8,12,36,0
9,8,12,36,0
9,8,12,36,0
9,8,13,37,1
9,8,13,37,1
9,8,13,37,1
9,8,14,20,2
9,8,14,20,2
9,8,18,20,3
9,9,12,47,1
9,9,12,47,2
9,9,12,47,2
9,9,19,5,3
9,10,13,15,1
9,10,13,15,1
9,10,13,15,0
9,10,16,49,3
9,10,19,6,3
9,10,21,5,3
9,11,14,16,2
9,11,14,16,2
9,11,14,16,2
9,11,18,41,3
9,12,14,43,2
9,12,14,43,2
9,12,14,43,2
9,12,16,14,3
9,12,17,12,3
9,12,17,12,2
9,12,17,12,3
9,12,17,12,2
9,12,20,44,3
9,13,19,52,3
9,14,14,39,2
9,14,14,39,2
9,14,15,14,3
9,14,17,29,3
9,14,17,29,3
9,14,17,29,3
9,15,11,41,1
9,15,11,41,1
9,15,13,4,1
9,15,14,3,1
9,15,14,3,2
9,16,12,36,1
9,16,12,36,1
9,16,12,36,1
9,16,12,36,1
9,16,12,48,1
9,16,12,48,1
9,16,13,51,1
9,16,13,51,2
9,16,13,51,1
9,16,15,13,3
9,16,15,14,3
9,16,15,14,3
9,17,10,27,0
9,17,10,27,0
9,17,11,10,0
9,17,11,10,0
9,17,11,10,0
9,17,12,43,1
9,17,12,43,1
9,17,12,43,1
9,17,13,32,1
9,17,13,32,1
9,17,14,5,1
9,17,14,5,2
9,17,14,6,2
9,17,15,7,3
9,17,15,49,3
9,17,15,49,3
9,17,18,12,3
9,17,18,13,3
encog,BasicNetwork,java,3.4.0,1,1554196571101
[BASIC]
[BASIC:PARAMS]
[BASIC:NETWORK]
beginTraining=0
connectionLimit=0
contextTargetOffset=0,0,0
contextTargetSize=0,0,0
endTraining=2
hasContext=f
inputCount=4
layerCounts=1,8,5
layerFeedCounts=1,7,4
layerContextCount=0,0,0
layerIndex=0,1,9
output=0.2537517424,0.3154675575,-0.8739039638,-0.4408848221,-0.8484433638,-0.999915299,-0.6964984771,-0.208278439,1,0,0,-0.4545454545,0.3559322034,1
outputCount=1
weightIndex=0,8,43
weights=0.5976774048,-0.7925906525,0.7127327881,-0.9611660362,0.8031350986,-0.7286657218,1.0990482817,-0.5985785536,-0.0783115433,0.575612931,1.1267500918,1.7184744034,0.2271044512,-1.0525796764,0.0900869671,1.1492323512,0.6141715555,-1.0455927965,-0.0925453451,0.2471651431,2.3634316872,0.3939369257,0.4607437082,-0.1435186798,0.8428535365,-0.0848896791,-0.070602589,-1.2640263565,2.4899996734,-0.2185394776,10.3421332361,-0.1650898311,-0.2750133571,-0.79680959,-0.8051139953,0.8219933747,-0.0727160299,-0.4609522002,-1.0410685492,-0.5354063412,0.3028724456,-0.6835374219,0.169591233
biasActivation=0,1,1
[BASIC:ACTIVATION]
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationLinear"
encog,BasicNetwork,java,3.4.0,1,1548158734516
[BASIC]
[BASIC:PARAMS]
[BASIC:NETWORK]
beginTraining=0
connectionLimit=0
contextTargetOffset=0,0,0
contextTargetSize=0,0,0
endTraining=2
hasContext=f
inputCount=4
layerCounts=4,8,5
layerFeedCounts=4,7,4
layerContextCount=0,0,0
layerIndex=0,4,12
output=0.6991387348,-0.8711034513,-0.996886038,-0.832747291,-0.0935682806,-0.9996163977,0.5399150265,0.9411173394,-0.5084989975,0.4850010791,0.9999999957,1,0,-0.6666666667,-0.4545454545,0.6949152542,1
outputCount=4
weightIndex=0,32,67
weights=-2.6901880743,0.6512821123,-1.2270002115,1.63124668,0.1982387305,-0.2994789552,1.5833040739,-0.9450411677,2.0541422847,-0.718279397,-1.1761952241,0.5028631512,0.0690323612,-1.496141565,-0.1955149568,-0.7453976822,-0.3691141073,0.9854755554,2.2113850088,-1.5216550292,0.9652087936,-1.3028209693,-1.3346156171,0.4142247818,1.0821207364,0.1987534858,0.6202881884,-0.2940331887,-1.4643282498,2.6960334656,-0.0167663298,-2.9907087565,0.3469960227,-0.0441249736,-2.5998575813,-0.7106361301,-0.8111809962,2.2216158678,-0.5482762437,-1.7996398291,-3.6734127565,-2.9102547958,0.4845401914,0.3760471288,-0.0124987546,0.3784047483,0.5860932613,-0.2682876707,0.7429004186,-7.559247176,-3.4421363532,1.1989747484,-2.3340717496,-1.4740773042,-0.7795788072,-1.8241693655,-0.630132295,-0.8191869009,-0.4060569987,-1.0997423162,-0.5495165849,0.1407829068,-2.2964930412,0.0798893221,-19.5271913755,2.0474187009,-0.2622671892
biasActivation=0,1,1
[BASIC:ACTIVATION]
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationTANH"
"org.encog.engine.network.activation.ActivationLinear"
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
filePattern="logs/jastadd-mquat-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
package de.tudresden.inf.st.eraser.learner_test;
import org.junit.Test;
import static org.junit.Assert.fail;
/**
* TODO: Add description.
*
* @author rschoene - Initial contribution
*/
public class ATest {
@Test
public void test1() {
fail();
}
}
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/eraser.log"
filePattern="logs/eraser-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="info">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
/build/
repositories {
mavenCentral()
}
sourceCompatibility = 1.8
apply plugin: 'java'
apply plugin: 'application'
dependencies {
compile project(':eraser-base')
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.8'
compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.11.1'
compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.11.1'
testCompile group: 'junit', name: 'junit', version: '4.12'
testCompile group: 'org.hamcrest', name: 'hamcrest-junit', version: '2.0.0.0'
}
run {
mainClassName = 'de.tudresden.inf.st.eraser.ml_test_boqi.Main'
standardInput = System.in
if (project.hasProperty("appArgs")) {
args Eval.me(appArgs)
}
}
sourceSets {
main {
java {
srcDir 'src/main/java'
}
}
}
package de.tudresden.inf.st.eraser.ml_test_boqi;
import de.tudresden.inf.st.eraser.jastadd.model.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.math3.stat.StatUtils;
public class Main {
private static final Logger logger = LogManager.getLogger(Main.class);
public static void main(String[] args) {
logger.info("Hello World!");
createAndTestBrightnessNetwork();
}
private static Root createModel() {
Root model = Root.createEmptyRoot();
Group group = new Group();
group.setID("Group1");
model.addGroup(group);
// Input item states must already be normalized, e.g. 1.0, 0.06666666666666665, 0.4545454545454546, -0.5593220338983051, 1 (bias).
NumberItem monthItem = new NumberItem();
monthItem.setState(-1.0);
monthItem.setID("month");
monthItem.setLabel("datetime-month");
NumberItem dayItem = new NumberItem();
dayItem.setState(0.2666666666666666);
dayItem.setID("day");
dayItem.setLabel("datetime-day");
NumberItem hourItem = new NumberItem();
hourItem.setState(-0.6363636363636364);
hourItem.setID("hour");
hourItem.setLabel("datetime-hour");
NumberItem minuteItem = new NumberItem();
minuteItem.setState(-0.5593220338983051);
minuteItem.setID("minute");
minuteItem.setLabel("datetime-minute");
NumberItem biasItem = new NumberItem();
biasItem.setState(1);
biasItem.setID("bias");
biasItem.setLabel("bias");
group.addItem(monthItem);
group.addItem(dayItem);
group.addItem(hourItem);
group.addItem(minuteItem);
group.addItem(biasItem);
return model;
}
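/*
 * Hedged sketch, not part of the original commit: the hard-coded states above appear to be
 * the first CSV row (month 7, day 20, hour 12, minute 13) mapped to [-1, 1] by range
 * normalization; the min/max bounds (month 7..9, day 1..31, hour 10..21, minute 0..59) are
 * read off the training data and are assumptions.
 */
private static double normalizeToRange(double value, double min, double max) {
    // 2 * (value - min) / (max - min) - 1, e.g. month 7 -> -1.0, day 20 -> 0.2666...
    return 2.0 * (value - min) / (max - min) - 1.0;
}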
private static final int REPETITIONS = 1;
private static void classifyTimed(
NeuralNetworkRoot nn,
Function<NeuralNetworkRoot, DoubleNumber> classify,
Function<DoubleNumber, String> leafToString) {
List<String> results = new ArrayList<>();
List<Long> times = new ArrayList<>();
long before = System.nanoTime();
DoubleNumber classification = classify.apply(nn);
long diff = System.nanoTime() - before;
results.add(leafToString.apply(classification));
times.add(TimeUnit.NANOSECONDS.toMillis(diff));
logger.info("Classification results: {}", results);
logger.info("Took {}ms", String.join("ms, ", times.stream().map(l -> Long.toString(l)).collect(Collectors.toList())));
logger.info("Took on average: {}ms",
Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).average().orElse(-1));
logger.info("Took on median: {}ms",
Arrays.stream(times.toArray(new Long[0])).mapToLong(l -> l).sorted()
.skip((REPETITIONS - 1) / 2).limit(2 - REPETITIONS % 2).average().orElse(Double.NaN));
}
/**
* Purpose: Create a neural network with 3 layers (5 + 8 + 4 neurons)
*/
private static void createAndTestBrightnessNetwork() {
Root model = createModel();
Item monthItem = model.resolveItem("month").orElseThrow(
() -> new RuntimeException("Month not found"));
Item dayItem = model.resolveItem("day").orElseThrow(
() -> new RuntimeException("Day not found"));
Item hourItem = model.resolveItem("hour").orElseThrow(
() -> new RuntimeException("Hour not found"));
Item minuteItem = model.resolveItem("minute").orElseThrow(
() -> new RuntimeException("Minute not found"));
Item biasItem = model.resolveItem("bias").orElseThrow(
() -> new RuntimeException("Bias not found"));
NeuralNetworkRoot nn = new NeuralNetworkRoot();
// Activation functions: tanh for the trained neurons, a constant 1.0 for the hidden-layer
// bias neuron. The signum variant is defined but not used below.
DoubleArrayDoubleFunction signum = inputs -> Math.signum(Arrays.stream(inputs).sum());
DoubleArrayDoubleFunction tanh = inputs -> Math.tanh(Arrays.stream(inputs).sum());
DoubleArrayDoubleFunction function_one = inputs -> function_one();
// Weights produced by the learner module.
ArrayList<Double> weights= new ArrayList<Double>(Arrays.asList(
-4.8288886204,0.6723236931,2.1451097188,-0.8551053267,-0.7858304445,4.1369566727,-3.3096691918,
-0.2190980261,2.6871317298,1.2272772167,-2.5292510941,-1.2860407542,-4.2280191541,1.004752063,
0.8345207039,0.0123185817,-0.5921808915,0.0967336988,-0.305892589,0.5572392781,-0.7190098073,
-1.6247354373,0.4589248822,-0.0269816271,2.2208040852,-3.6281085698,0.2204999381,4.7263701556,
-4.8348948698,0.231141867,8.7120706018,-1.4912707741,0.9482851705,0.1377551973,-6.6525856465,
-1.321197315,-2.7369948929,17.664289214,-3.1279212743,-0.8245974167,-1.4251924355,0.8370511414,
2.0841638143,-0.210152817,-1.9414132298,-1.7973688846,-2.1977997794,-3.6046836685,-3.3403186721,
-6.1556924635,-2.8952903587,-1.0773989561,0.2300429028,-0.2184650371,0.0297181797,0.5709092417,
1.3960358442,-3.1577981239,0.0423944625,-17.8143314027,-1.4439317172,-0.5137688896,1.0166045804,
0.3059149818,1.0938282764,0.6203368549,0.702449827));
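// Layout of the weight list above (as consumed by the loop further below): indices 0..31
// hold the hidden-to-output weights, grouped per output neuron (weights.get(i + 8*k)
// connects hidden neuron i to output k); indices 32..66 hold the input-to-hidden weights,
// five per non-bias hidden neuron in the order month, day, hour, minute, bias.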
// input layer
InputNeuron month = new InputNeuron();
month.setItem(monthItem);
InputNeuron day = new InputNeuron();
day.setItem(dayItem);
InputNeuron hour = new InputNeuron();
hour.setItem(hourItem);
InputNeuron minute = new InputNeuron();
minute.setItem(minuteItem);
InputNeuron bias = new InputNeuron();
bias.setItem(biasItem);
nn.addInputNeuron(month);
nn.addInputNeuron(day);
nn.addInputNeuron(hour);
nn.addInputNeuron(minute);
nn.addInputNeuron(bias);
// output layer
OutputLayer outputLayer = new OutputLayer();
OutputNeuron output0 = new OutputNeuron();
output0.setActivationFormula(tanh);
OutputNeuron output1 = new OutputNeuron();
output1.setActivationFormula(tanh);
OutputNeuron output2 = new OutputNeuron();
output2.setActivationFormula(tanh);
OutputNeuron output3 = new OutputNeuron();
output3.setActivationFormula(tanh);
outputLayer.addOutputNeuron(output0);
outputLayer.addOutputNeuron(output1);
outputLayer.addOutputNeuron(output2);
outputLayer.addOutputNeuron(output3);
outputLayer.setCombinator(inputs->predictor(inputs));
nn.setOutputLayer(outputLayer);
// hidden layer
// hidden layer: 7 tanh neurons fed by all inputs, plus one constant bias neuron
HiddenNeuron[] hiddenNeurons = new HiddenNeuron[8];
for (int i = 0; i < hiddenNeurons.length; i++) {
    if (i == 7) {
        // bias neuron of the hidden layer: constant output 1.0
        HiddenNeuron hiddenNeuron = new HiddenNeuron();
        hiddenNeuron.setActivationFormula(function_one);
        hiddenNeurons[i] = hiddenNeuron;
        nn.addHiddenNeuron(hiddenNeuron);
        bias.connectTo(hiddenNeuron, 1.0);
        hiddenNeuron.connectTo(output0, weights.get(i));
        hiddenNeuron.connectTo(output1, weights.get(i + 8));
        hiddenNeuron.connectTo(output2, weights.get(i + 8 * 2));
        hiddenNeuron.connectTo(output3, weights.get(i + 8 * 3));
    } else {
        HiddenNeuron hiddenNeuron = new HiddenNeuron();
        hiddenNeuron.setActivationFormula(tanh);
        hiddenNeurons[i] = hiddenNeuron;
        nn.addHiddenNeuron(hiddenNeuron);
        // input-to-hidden weights start at index 32 (= hiddenNeurons.length * 4), five per neuron
        month.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4) + i * 5));
        day.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 1) + i * 5));
        hour.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 2) + i * 5));
        minute.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 3) + i * 5));
        bias.connectTo(hiddenNeuron, weights.get((hiddenNeurons.length * 4 + 4) + i * 5));
        hiddenNeuron.connectTo(output0, weights.get(i));
        hiddenNeuron.connectTo(output1, weights.get(i + 8));
        hiddenNeuron.connectTo(output2, weights.get(i + 8 * 2));
        hiddenNeuron.connectTo(output3, weights.get(i + 8 * 3));
    }
}
model.getMachineLearningRoot().setPreferenceLearning(nn);
System.out.println(model.prettyPrint());
classifyTimed(nn, NeuralNetworkRoot::classify,
classification -> Double.toString(classification.number));
}
private static double function_one() {
return 1.0;
}
/** Output-layer combinator: returns the label of the strongest output neuron. */
private static double predictor(double[] inputs) {
    int index = 0;
    double maxInput = StatUtils.max(inputs);
    System.out.println(Arrays.toString(inputs));
    for (int i = 0; i < inputs.length; i++) {
        if (inputs[i] == maxInput) {
            index = i;
        }
    }
    // Label values produced by the learner, indexed by output neuron.
    ArrayList<Double> outputs = new ArrayList<>(Arrays.asList(2.0, 1.0, 3.0, 0.0));
    return outputs.get(index);
}
}
//inputs:
//[BasicMLData:-1.0,0.2666666666666666,-0.6363636363636364,-0.5593220338983051]
//outputs:
//[BasicMLData:-0.9151867668336432,-0.1568555041251098,-0.9786996639280675,-0.9436628188408074]
//[7, 20, 12, 13] -> predicted: 1(correct: 2)
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/jastadd-mquat.log"
filePattern="logs/jastadd-mquat-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
</Loggers>
</Configuration>
package de.tudresden.inf.st.eraser.ml_test_boqi;
import de.tudresden.inf.st.eraser.jastadd.model.*;
import org.junit.Test;
import java.util.Set;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.fail;
/**
* TODO: Add description.
*
* @author rschoene - Initial contribution
*/
public class ATest {
@Test
public void test1() {
fail();
}
}
time,topic,qos,message
0,oh2/out/iris1_item/state,0,1
1,oh2/out/iris1_item/state,0,0
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="Console">
<PatternLayout pattern="%highlight{%d{HH:mm:ss.SSS} %-5level} %c{1.} - %msg%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/eraser.log"
filePattern="logs/eraser-%i.log">
<PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n"/>
<Policies>
<OnStartupTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingFile>
</Appenders>
<Loggers>
<Root level="info">
<AppenderRef ref="Console"/>
<AppenderRef ref="RollingFile"/>
</Root>
<Logger name="de.tudresden.inf.st.eraser.openhab2.mqtt" level="DEBUG" additivity="false">
<Appender-ref ref="Console"/>
</Logger>
</Loggers>
</Configuration>
Bundle resources go in here!