Apriori Sets And Sequences - Keith's MS Thesis
Apriori Sets And Sequences
About
Code
Performance Data Collection
·
Data Sets
Documents
Results
·
                                          
Printer Friendly Version
Performance Data Collection

Intro ] Automatically Starting - Stopping ] BackPropagation.class ]
BackPropagation.java ] DummyWait.class ] DummyWait.java ]
IB1.class ] IB1.java ] IBk.class ]
IBk.java ] NaiveBayesSimple.class ] NaiveBayesSimple.java ]
Start-System-General ] System-General ] [ perf-data-Makefile ]
start general collection ]

data ]

## This is for Windows Only.

## This runs different classifier algorithms in Weka and collects
## performance data for the computer system as Weka runs.

## The performance data for each run of Weka is stored in a single
## CSV (Comma Separated Value) file named to denote the month,
## day, hour and minute when the test started.

## Each CSV file is then transformed into an ARFF file. In this ARFF
## file each instance represents one point in time for which a single value
## is given for each attribute, or performance metric captured. 

## Each ARFF file is then transformed into a single Weka instance and added
## to the overall database file for performance data, perfdata.arff.

##### CLASS PATHS #####
# Windows-style classpath (';' separator, absolute drive path) for java
# launched directly by make.
CLASSPATH = ".;C:\Weka-ARMiner-Time_Sequence"
# Relative classpath for java launched inside the bash -c '...' recipe lines
# (Cygwin side of the same tree).
CYGWINCLASSPATH = ".;../Weka-ARMiner-Time_Sequence"
CLASSPATHFLAG = -classpath $(CLASSPATH)
CYGWINCLASSPATHFLAG = -classpath $(CYGWINCLASSPATH)
##### /CLASS PATHS #####

##### JAVA JVM #####
# Heap limits for every classifier run.
# NOTE(review): -Xms961k (kilobytes) looks unusually small next to the
# 1024m maximum — possibly intended as 961m; confirm before changing.
MEMORYFLAGS = -Xms961k -Xmx1024m
JAVA = java $(MEMORYFLAGS)
##### /JAVA JVM #####

##### TRAINING AND TEST DATA #####
# All directory variables end in '/' so they concatenate directly with
# file names (e.g. $(DATAHOME)census-income.data).
# directory where census income data can be found
DATAHOME = data/
# place to store filtered data sets
DATAFILTER = $(DATAHOME)filtered/
# directory for storing results of learning algorithm tests
TESTRESULTS = $(DATAHOME)results/
# data for training machine learning algorithm
TRAINDATA = $(DATAHOME)census-income.data
# data for testing machine learning algorithm
TESTDATA = $(DATAHOME)census-income.test
# normalized data
NORMALIZED-TRAINDATA = $(DATAFILTER)census-income-normalized.data
NORMALIZED-TESTDATA = $(DATAFILTER)census-income-normalized.test
# discretized data
DISCRETIZED-TRAINDATA = $(DATAFILTER)census-income-discretized.data
DISCRETIZED-TESTDATA = $(DATAFILTER)census-income-discretized.test
# normalized then discretized data
NORMALIZED-DISCRETIZED-TRAINDATA = $(DATAFILTER)census-income-normalized-discretized.data
NORMALIZED-DISCRETIZED-TESTDATA = $(DATAFILTER)census-income-normalized-discretized.test
# normalized first 5000 instances (used by the neural network targets,
# which cannot train on the full set in reasonable time)
NORMALIZED-5000-TRAINDATA = $(DATAFILTER)census-income-normalized-5000.data
# no missing data
NOMISSING-TRAINDATA = $(DATAFILTER)nomissing-census-income.data
NOMISSING-TESTDATA = $(DATAFILTER)nomissing-census-income.test
##### /TRAINING AND TEST DATA #####

##### MACHINE LEARNING ALGORITHMS #####
# attribute name under which the classifier is recorded in the perf database
LEARNING-ALGORITHM = learning-algorithm
# decision tree (Weka built-in J48)
DECISIONTREE = weka.classifiers.j48.J48
# instance based (unqualified class name — IBk.class ships alongside this
# Makefile, per the file listing above)
INSTANCEBASED = IBk
# naive bayes: Weka built-in plus the local simplified implementation
NAIVEBAYES = weka.classifiers.NaiveBayes
NAIVEBAYES-SIMPLE = NaiveBayesSimple
# neural network (local BackPropagation class)
NEURALNETWORK = BackPropagation
##### /MACHINE LEARNING ALGORITHMS #####

# directory where perf data will be found
PERFHOME = perfdata/
# directory where the perf raw data as csv files will be found
PERFRAW = $(PERFHOME)raw/
# directory where the perf data is stored one instance per file
PERFINSTANCES = $(PERFHOME)instances/
# main data file for perf data
PERFDATA = $(PERFHOME)perfdata.arff
# Temporary places to put stuff; TEMPFILE1/TEMPFILE2 are ping-ponged
# between successive AddWithValueFilter invocations in the recipes below.
TEMPDIR = temp/
TEMPFILE1 = $(TEMPDIR)kaptemp1
TEMPFILE2 = $(TEMPDIR)kaptemp2
# file containing the name of the last file created in perf home
LASTFILE = $(TEMPDIR)last.txt

# ARFF relation name. Fixed spelling ("peroformance" -> "performance").
# NOTE(review): if an existing perfdata.arff was written under the old
# misspelled relation name, keep the old spelling or regenerate the file.
RELATIONNAME = weka-performance
DATAFROM = data-from

# things most tests rely on
NEED = $(TESTRESULTS) $(TEMPDIR) $(PERFDATA) $(PERFINSTANCES)

##### Frequently Used Commands #####
# start performance data collection

# This starts a dummy java process to force the collection
# of java process information. There is a bug in MS W2K (and NT I believe)
# Perfmon that does not collect info on processes using the wildcard
# for processes unless the process is already running when the service starts
#
# We sleep for 30 seconds to ensure that the dummy java process finishes
# before we continue. This way the dummy process will not affect in any way
# the results of the java process we actually wish to monitor.
STARTPERF = bash -c 'bash -c "java -classpath . DummyWait &"; net start "Performance Logs and Alerts"; sleep 30'

# stop performance data collection

# This stops the data collection. We wait 30 seconds before actually
# stopping to obtain one or two more points of data. This is so we
# can be more likely to capture information related to memory cleanup etc.
STOPPERF = bash -c 'sleep 30; net stop "Performance Logs and Alerts"'

# list the directory contents of perfraw sorted by time (newest first),
# 1 file per line, and store only the first line — i.e. the newest CSV —
# in $(LASTFILE)
LAST = ls -t1 $(PERFRAW) | head -1 > $(LASTFILE)
# backquoted command substitution: expands to the stored file name at the
# time the recipe's shell runs, so it must execute after $(LAST)
LASTFILENAME = `cat $(LASTFILE)`

# Converts the last csv file generated into an ARFF file consisting of
# a single instance.
#
# Use the CSV Loader included with Weka to generate an ARFF file.
# this file will have each point along the time line as a single instance.
# We take this and create a single instance using time sequence attributes
# and store the results in $(TEMPFILE1).
LASTARFF = bash -c '$(LAST); java weka.core.converters.CSVLoader $(PERFRAW)$(LASTFILENAME) > $(PERFRAW)$(LASTFILENAME).arff; java $(CYGWINCLASSPATHFLAG) wpi.filters.TimeSequenceInstanceFilter -i $(PERFRAW)$(LASTFILENAME).arff -o $(TEMPFILE1)'

# Adds the last generated single instance ARFF file to the master database
# file. A backup is made of the master database file before adding is done.
# It would be nice to add some error checking here but it might be too costly
# once the database file grows larger.
ADDLAST = bash -c 'cp $(PERFDATA) $(PERFDATA).backup; java $(CYGWINCLASSPATHFLAG) wpi.timesequence.AppendInstances -t $(PERFDATA) -a $(PERFINSTANCES)$(LASTFILENAME).arff > $(TEMPFILE1); cp $(TEMPFILE1) $(PERFDATA)'
##### /Frequently Used Commands #####

# To run a test:
# start performance data collection
#
#	start the dummy java process
#		This forces Windows 2000 Performance Monitoring
#		to collect info about processes named "java"
#
#	 sleep 30 seconds 
#		so dummy java has time to quit
#
# run learning algorithm
#
# stop performance data collection
#
#	sleep 30 seconds
#		so we get a point or two after in our performance data
#
# create an ARFF file from the newest CSV file
# 
#	get the name of the new perf data csv file
#
#	change this arff file into a single instance using
# 	the filter TimeSequenceInstanceFilter
#
# add extra attributes like: 
# 	name of the machine learning algorithm
#	options used for the algorithm
#	data set used
#	original arff file containing single instance as set of
#	instances (makes it easier to look at the data)
#
# add it to the master database perfdata.arff

# None of these targets name real files; declare them phony so a stray
# file called e.g. "all" or "naivebayes" cannot mask them, and so they
# always run when requested.
.PHONY: all decisiontree instancebased naivebayes neuralnetwork
all: decisiontree instancebased naivebayes neuralnetwork

##### Decision Tree #####

# Run every J48 decision-tree experiment in sequence.
decisiontree : decisiontree-1 decisiontree-2 decisiontree-3 decisiontree-4 decisiontree-5 decisiontree-6 decisiontree-7 decisiontree-8

##########################################################

# Control test: plain train/test data, pruning disabled (-U).
# Values passed to AddWithValueFilter are now quoted, matching the style
# of decisiontree-2..8 (behavior is identical for these space-free values).
decisiontree-1 : $(NEED)
	echo "		decisiontree-1"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -U > $(TESTRESULTS)test-no-pruning.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-U" -V "-U" -L "-U" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# Same data, pruning enabled (J48 default — no -U flag); the option-U
# attribute is recorded as "none" to mark its absence.
decisiontree-2 : $(NEED)
	echo "		decisiontree-2"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) > $(TESTRESULTS)test-pruning.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-U" -V "none" -L "none" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# Reduced-error pruning (-R) instead of default subtree-raising pruning.
decisiontree-3 : $(NEED)
	echo "		decisiontree-3"
	$(STARTPERF)
	$(JAVA)	$(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -R > $(TESTRESULTS)test-reduced-error-pruning.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-R" -V "-R" -L "-R" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# No subtree raising during pruning (-S).
decisiontree-4 : $(NEED)
	echo "		decisiontree-4"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -S > $(TESTRESULTS)test-no-subtree-raising.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-S" -V "-S" -L "-S" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# Require 15 instances to create a leaf (-M 15). Note: numeric option
# values are recorded without a -L label here and in the later targets.
decisiontree-5 : $(NEED)
	echo "		decisiontree-5"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -M 15 > $(TESTRESULTS)test-15-instances-per-leaf.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-M" -V "15" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# Require 20 instances to create a leaf (-M 20).
decisiontree-6 : $(NEED)
	echo "		decisiontree-6"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -M 20 > $(TESTRESULTS)test-20-instances-per-leaf.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-M" -V "20" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# Require 25 instances to create a leaf (-M 25).
decisiontree-7 : $(NEED)
	echo "		decisiontree-7"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -M 25 > $(TESTRESULTS)test-25-instances-per-leaf.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-M" -V "25" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# Require 30 instances to create a leaf (-M 30).
# BUG FIX: the results were redirected to test-15-instances-per-leaf.txt,
# silently overwriting decisiontree-5's output; now written to its own file.
decisiontree-8 : $(NEED)
	echo "		decisiontree-8"
	$(STARTPERF)
	$(JAVA) $(DECISIONTREE) -t $(TRAINDATA) -T $(TESTDATA) -M 30 > $(TESTRESULTS)test-30-instances-per-leaf.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(DECISIONTREE)" -L "$(DECISIONTREE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-M" -V "30" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

##### Instance Based #####

# Run every IBk instance-based experiment in sequence.
instancebased : instancebased-1 instancebased-2 instancebased-3

##########################################################

# IBk with default parameters and the full (unfiltered) data set.
instancebased-1 : $(NEED)
	echo "		instancebased-1"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(INSTANCEBASED) -t $(TRAINDATA) -T $(TESTDATA) > $(TESTRESULTS)ibk-test-1.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(INSTANCEBASED)" -L "$(INSTANCEBASED)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# IBk on the normalized data set.
instancebased-2 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		instancebased-2"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(INSTANCEBASED) -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)ibk-test-2-normalized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(INSTANCEBASED)" -L "$(INSTANCEBASED)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# IBk on the discretized data set.
instancebased-3 : $(NEED) $(DISCRETIZED-TRAINDATA) $(DISCRETIZED-TESTDATA)
	echo "		instancebased-3"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(INSTANCEBASED) -t $(DISCRETIZED-TRAINDATA) -T $(DISCRETIZED-TESTDATA) > $(TESTRESULTS)ibk-test-3-discretized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(INSTANCEBASED)" -L "$(INSTANCEBASED)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(DISCRETIZED-TRAINDATA)" -L "$(DISCRETIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

##### Naive Bayes #####

# Run every Naive Bayes experiment ("fancy" = Weka NaiveBayes,
# "simple" = local NaiveBayesSimple) in sequence.
naivebayes : naivebayes-1 naivebayes-2 naivebayes-3 naivebayes-4 naivebayes-5 naivebayes-6 naivebayes-7 naivebayes-8 naivebayes-9 naivebayes-10

##########################################################

# normalized data, fancy (Weka NaiveBayes)
naivebayes-1 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		naivebayes-1"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES) -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)naivebayes-fancy-test-1-normalized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES)" -L "$(NAIVEBAYES)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# discretized data, fancy (Weka NaiveBayes)
naivebayes-2 : $(NEED) $(DISCRETIZED-TRAINDATA) $(DISCRETIZED-TESTDATA)
	echo "		naivebayes-2"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES) -t $(DISCRETIZED-TRAINDATA) -T $(DISCRETIZED-TESTDATA) > $(TESTRESULTS)naivebayes-fancy-test-2-discretized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES)" -L "$(NAIVEBAYES)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(DISCRETIZED-TRAINDATA)" -L "$(DISCRETIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# normalized then discretized data, fancy (Weka NaiveBayes)
naivebayes-3 : $(NEED) $(NORMALIZED-DISCRETIZED-TRAINDATA) $(NORMALIZED-DISCRETIZED-TESTDATA)
	echo "		naivebayes-3"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES) -t $(NORMALIZED-DISCRETIZED-TRAINDATA) -T $(NORMALIZED-DISCRETIZED-TESTDATA) > $(TESTRESULTS)naivebayes-fancy-test-3-normalized-discretized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES)" -L "$(NAIVEBAYES)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-DISCRETIZED-TRAINDATA)" -L "$(NORMALIZED-DISCRETIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# normalized data, simple (local NaiveBayesSimple)
naivebayes-4 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		naivebayes-4"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES-SIMPLE) -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)naivebayes-simple-test-4-normalized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES-SIMPLE)" -L "$(NAIVEBAYES-SIMPLE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# discretized data, simple (local NaiveBayesSimple)
naivebayes-5 : $(NEED) $(DISCRETIZED-TRAINDATA) $(DISCRETIZED-TESTDATA)
	echo "		naivebayes-5"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES-SIMPLE) -t $(DISCRETIZED-TRAINDATA) -T $(DISCRETIZED-TESTDATA) > $(TESTRESULTS)naivebayes-simple-test-5-discretized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES-SIMPLE)" -L "$(NAIVEBAYES-SIMPLE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(DISCRETIZED-TRAINDATA)" -L "$(DISCRETIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# normalized then discretized data, simple (local NaiveBayesSimple)
naivebayes-6 : $(NEED) $(NORMALIZED-DISCRETIZED-TRAINDATA) $(NORMALIZED-DISCRETIZED-TESTDATA)
	echo "		naivebayes-6"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES-SIMPLE) -t $(NORMALIZED-DISCRETIZED-TRAINDATA) -T $(NORMALIZED-DISCRETIZED-TESTDATA) > $(TESTRESULTS)naivebayes-simple-test-6-normalized-discretized.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES-SIMPLE)" -L "$(NAIVEBAYES-SIMPLE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-DISCRETIZED-TRAINDATA)" -L "$(NORMALIZED-DISCRETIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# regular (unfiltered) data, simple (local NaiveBayesSimple)
naivebayes-7 : $(NEED)
	echo "		naivebayes-7"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES-SIMPLE) -t $(TRAINDATA) -T $(TESTDATA) > $(TESTRESULTS)naivebayes-simple-test-7.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES-SIMPLE)" -L "$(NAIVEBAYES-SIMPLE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# regular (unfiltered) data, fancy (Weka NaiveBayes)
naivebayes-8 : $(NEED)
	echo "		naivebayes-8"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES) -t $(TRAINDATA) -T $(TESTDATA) > $(TESTRESULTS)naivebayes-fancy-test-8.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES)" -L "$(NAIVEBAYES)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(TRAINDATA)" -L "$(TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# data with missing values removed, fancy (Weka NaiveBayes)
naivebayes-9 : $(NEED) $(NOMISSING-TRAINDATA) $(NOMISSING-TESTDATA)
	echo "		naivebayes-9"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES) -t $(NOMISSING-TRAINDATA) -T $(NOMISSING-TESTDATA) > $(TESTRESULTS)naivebayes-fancy-test-9-nomissing.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES)" -L "$(NAIVEBAYES)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NOMISSING-TRAINDATA)" -L "$(NOMISSING-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# data with missing values removed, simple (local NaiveBayesSimple)
naivebayes-10 : $(NEED) $(NOMISSING-TRAINDATA) $(NOMISSING-TESTDATA)
	echo "		naivebayes-10"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NAIVEBAYES-SIMPLE) -t $(NOMISSING-TRAINDATA) -T $(NOMISSING-TESTDATA) > $(TESTRESULTS)naivebayes-simple-test-10-nomissing.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NAIVEBAYES-SIMPLE)" -L "$(NAIVEBAYES-SIMPLE)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NOMISSING-TRAINDATA)" -L "$(NOMISSING-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

##### Neural Networks #####

# Run every BackPropagation neural-network experiment in sequence.
neuralnetwork : neuralnetwork-1 neuralnetwork-2 neuralnetwork-3 neuralnetwork-4 neuralnetwork-5 neuralnetwork-6 neuralnetwork-7 neuralnetwork-8 neuralnetwork-9 neuralnetwork-10 neuralnetwork-11 neuralnetwork-12

##########################################################

# 5000-instance normalized training set, option set 1:
# -I 6000 iterations, -H 10 hidden units, -E 0.0001, -M 0.0001
neuralnetwork-1 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		neuralnetwork-1"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 6000 -H 10 -E 0.0001 -M 0.0001 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-1.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "6000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "10" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0001" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0001" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000-instance normalized training set, option set 2:
# doubled iterations (-I 12000), otherwise as set 1
neuralnetwork-2 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		neuralnetwork-2"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 12000 -H 10 -E 0.0001 -M 0.0001 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-2.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "12000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "10" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0001" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0001" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000-instance normalized training set, option set 3:
# larger -E (0.0112), otherwise as set 1
neuralnetwork-3 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		neuralnetwork-3"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 6000 -H 10 -E 0.0112 -M 0.0001 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-3.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "6000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "10" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0112" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0001" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000-instance normalized training set, option set 4:
# larger -E (0.0112) and -M (0.0445), otherwise as set 1
neuralnetwork-4 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	echo "		neuralnetwork-4"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 6000 -H 10 -E 0.0112 -M 0.0445 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-4.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N $(LEARNING-ALGORITHM) -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "6000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "10" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0112" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000 instances, option set 5: -I 6000 -H 2 -E 0.0112 -M 0.0445
# Same perf-collect/annotate pipeline as the other neuralnetwork-* targets;
# declared .PHONY since no file named "neuralnetwork-5" is ever produced.
.PHONY : neuralnetwork-5
neuralnetwork-5 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-5"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 6000 -H 2 -E 0.0112 -M 0.0445 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-5.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "6000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "2" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0112" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000 instances, option set 6: -I 8000 -H 3 -E 0.0112 -M 0.0445
# Same perf-collect/annotate pipeline as the other neuralnetwork-* targets;
# declared .PHONY since no file named "neuralnetwork-6" is ever produced.
.PHONY : neuralnetwork-6
neuralnetwork-6 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-6"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 8000 -H 3 -E 0.0112 -M 0.0445 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-6.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "8000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "3" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0112" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000 instances, option set 7: -I 12000 -H 3 -E 0.0112 -M 0.0445
# Same perf-collect/annotate pipeline as the other neuralnetwork-* targets;
# declared .PHONY since no file named "neuralnetwork-7" is ever produced.
.PHONY : neuralnetwork-7
neuralnetwork-7 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-7"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 12000 -H 3 -E 0.0112 -M 0.0445 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-7.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "12000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "3" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0112" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# 5000 instances, option set 8: -I 8000 -H 4 -E 0.0112 -M 0.0445
# Same perf-collect/annotate pipeline as the other neuralnetwork-* targets;
# declared .PHONY since no file named "neuralnetwork-8" is ever produced.
.PHONY : neuralnetwork-8
neuralnetwork-8 : $(NEED) $(NORMALIZED-5000-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-8"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 8000 -H 4 -E 0.0112 -M 0.0445 -t $(NORMALIZED-5000-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-8.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "8000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "4" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.0112" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-5000-TRAINDATA)" -L "$(NORMALIZED-5000-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# all instances, option set 9: -I 15000 -H 3 -E 0.001 -M 0.0445
# Uses the FULL normalized training set (not the 5000-instance subset);
# declared .PHONY since no file named "neuralnetwork-9" is ever produced.
.PHONY : neuralnetwork-9
neuralnetwork-9 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-9"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 15000 -H 3 -E 0.001 -M 0.0445 -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-9.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "15000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "3" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.001" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# all instances, option set 10: -I 15000 -H 2 -E 0.001 -M 0.0445
# Uses the FULL normalized training set (not the 5000-instance subset);
# declared .PHONY since no file named "neuralnetwork-10" is ever produced.
.PHONY : neuralnetwork-10
neuralnetwork-10 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-10"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 15000 -H 2 -E 0.001 -M 0.0445 -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-10.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "15000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "2" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.001" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# all instances, option set 11: -I 15000 -H 4 -E 0.001 -M 0.0445
# Uses the FULL normalized training set (not the 5000-instance subset);
# declared .PHONY since no file named "neuralnetwork-11" is ever produced.
.PHONY : neuralnetwork-11
neuralnetwork-11 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-11"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 15000 -H 4 -E 0.001 -M 0.0445 -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-11.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "15000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "4" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.001" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

# all instances, option set 12: -I 25000 -H 3 -E 0.001 -M 0.0445
# Uses the FULL normalized training set (not the 5000-instance subset);
# declared .PHONY since no file named "neuralnetwork-12" is ever produced.
.PHONY : neuralnetwork-12
neuralnetwork-12 : $(NEED) $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	@echo "		neuralnetwork-12"
	$(STARTPERF)
	$(JAVA) $(CLASSPATHFLAG) $(NEURALNETWORK) -I 25000 -H 3 -E 0.001 -M 0.0445 -t $(NORMALIZED-TRAINDATA) -T $(NORMALIZED-TESTDATA) > $(TESTRESULTS)neuralnetwork-test-12.txt
	$(STOPPERF)
	$(LASTARFF)
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(LEARNING-ALGORITHM)" -V "$(NEURALNETWORK)" -L "$(NEURALNETWORK)" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-I" -V "25000" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-H" -V "3" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "option-E" -V "0.001" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "option-M" -V "0.0445" -o $(TEMPFILE2)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE2) -N "training-data" -V "$(NORMALIZED-TRAINDATA)" -L "$(NORMALIZED-TRAINDATA)" -o $(TEMPFILE1)'
	bash -c 'java $(CYGWINCLASSPATHFLAG) wpi.filters.AddWithValueFilter -i $(TEMPFILE1) -N "$(DATAFROM)" -V "$(PERFINSTANCES)$(LASTFILENAME).arff" -L "$(PERFINSTANCES)$(LASTFILENAME).arff" -o $(PERFINSTANCES)$(LASTFILENAME).arff'
	$(ADDLAST)

##### How to make the directories and data files we need #####
# Every directory target just creates itself; `mkdir -p` is a no-op when
# the directory already exists, and `$@` always names the rule's target.

# place to store filtered data
$(DATAFILTER) :
	mkdir -p $@

# place to store machine learning results
$(TESTRESULTS) :
	mkdir -p $@

# place to store performance data
$(PERFHOME) :
	mkdir -p $@

# place to store raw perf data in csv (from perfmon)
# and arff (to load into Weka easily)
$(PERFRAW) :
	mkdir -p $@

# place to store instances in separate files
$(PERFINSTANCES) :
	mkdir -p $@

# place to store temp files
$(TEMPDIR) :
	mkdir -p $@

# Creates the template perfdata.arff file — the master database file for
# storing our results.
#
# The template holds the minimum needed for a valid ARFF file:
#   @relation  — named $(RELATIONNAME) ("Weka-performance")
#   @attribute — a single string attribute, $(DATAFROM); having it here
#                forces the attribute to be a string, and at least one
#                attribute is required for the file to be valid
#   @data      — the begin-data tag, also required for validity
$(PERFDATA) : $(PERFHOME)
	echo "@relation $(RELATIONNAME)" > $@
	echo "@attribute $(DATAFROM) string" >> $@
	echo "@data" >> $@

# normalized data
# NOTE(review): the single recipe below writes BOTH output files — the
# normalized training set (-o) and the normalized test set (-s), filtered
# in batch mode (-b) so the test set uses the training set's scaling.
# The test-data rule has no recipe of its own; it only chains to the
# training-data rule. This dual-output pattern is not parallel-safe under
# `make -j` — a stamp file would fix it; left as-is here.
$(NORMALIZED-TESTDATA) : $(NORMALIZED-TRAINDATA)

# NOTE(review): plain `java` here, not $(JAVA) — presumably deliberate so
# the filter runs without the tuned heap flags; confirm before changing.
$(NORMALIZED-TRAINDATA) : $(DATAFILTER)
	java $(CLASSPATHFLAG) weka.filters.NormalizationFilter -b -i $(TRAINDATA) -o $(NORMALIZED-TRAINDATA) -r $(TESTDATA) -s $(NORMALIZED-TESTDATA)

# discretized data
# NOTE(review): same dual-output pattern as the normalized-data rules —
# one recipe writes both the discretized training file (-o) and the
# discretized test file (-s); the test-data rule merely chains to it.
$(DISCRETIZED-TESTDATA) : $(DISCRETIZED-TRAINDATA)

$(DISCRETIZED-TRAINDATA) : $(DATAFILTER)
	java $(CLASSPATHFLAG) weka.filters.DiscretizeFilter -b -i $(TRAINDATA) -o $(DISCRETIZED-TRAINDATA) -r $(TESTDATA) -s $(DISCRETIZED-TESTDATA)

# normalized then discretized data
# Same dual-output pattern as the other filter rules: one recipe writes
# both the training file (-o) and the test file (-s).
$(NORMALIZED-DISCRETIZED-TESTDATA) : $(NORMALIZED-DISCRETIZED-TRAINDATA)

# FIX: the java invocation was missing $(CLASSPATHFLAG) — every other
# filter rule passes it, and without it the custom Weka build is not on
# the class path.
$(NORMALIZED-DISCRETIZED-TRAINDATA) : $(NORMALIZED-TRAINDATA) $(NORMALIZED-TESTDATA)
	java $(CLASSPATHFLAG) weka.filters.DiscretizeFilter -c last -b -i $(NORMALIZED-TRAINDATA) -o $(NORMALIZED-DISCRETIZED-TRAINDATA) -r $(NORMALIZED-TESTDATA) -s $(NORMALIZED-DISCRETIZED-TESTDATA)

# no missing data: drop every line containing '?' (the data's
# missing-value marker) from the raw train/test files.
# FIX: declare the real content dependency ($(TRAINDATA)/$(TESTDATA)) so
# the filtered file is rebuilt when the raw data changes; the output
# directory is an order-only prerequisite (after |) so its ever-changing
# timestamp never forces a spurious rebuild. Also removed the useless
# `cat | grep` — grep reads the file directly.
$(NOMISSING-TRAINDATA) : $(TRAINDATA) | $(DATAFILTER)
	grep -v '?' $(TRAINDATA) > $(NOMISSING-TRAINDATA)

$(NOMISSING-TESTDATA) : $(TESTDATA) | $(DATAFILTER)
	grep -v '?' $(TESTDATA) > $(NOMISSING-TESTDATA)

# first 5000 normalized training instances
# (5035 lines = ~35 lines of ARFF header/preamble + 5000 data rows)
$(NORMALIZED-5000-TRAINDATA) : $(NORMALIZED-TRAINDATA)
	head -5035 $< > $@

##### Testing perf start and stop #####
# Note: this only works when the service is not running
# Declared .PHONY so a stray file named "perftest" can never mask it.
.PHONY : perftest
perftest :
	$(STARTPERF)
	$(STOPPERF)

##### Testing last #####
# Finds the most recent perf-data file ($(LAST)) and shows its contents.
# Declared .PHONY so a stray file named "lasttest" can never mask it.
.PHONY : lasttest
lasttest :
	$(LAST)
	cat $(LASTFILE)

by: Keith A. Pray
Last Modified: July 4, 2004 2:32 AM
© 2004 - 1975 Keith A. Pray.
All rights reserved.

Current Theme: