Skip to content

Commit

Permalink
Code cleanups and warnings
Browse files Browse the repository at this point in the history
  • Loading branch information
smarthi committed Dec 23, 2015
1 parent b84e8ac commit 77cea83
Show file tree
Hide file tree
Showing 9 changed files with 28 additions and 56 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,6 @@ public class AdaptiveBagging implements Learner, Configurable {
/** The distributor processor. */
private BaggingDistributorProcessor distributorP;

/** The input streams for the ensemble, one per member. */
private Stream[] ensembleStreams;

/** The result stream. */
protected Stream resultStream;

Expand Down Expand Up @@ -116,7 +113,8 @@ protected void setLayout() {
}
}

ensembleStreams = new Stream[ensembleSize];
/* The input streams for the ensemble, one per member. */
Stream[] ensembleStreams = new Stream[ensembleSize];
for (int i = 0; i < ensembleSize; i++) {
ensembleStreams[i] = builder.createStream(distributorP);
builder.connectInputShuffleStream(ensembleStreams[i], ensemble[i].getInputProcessor()); // connect streams one-to-one with ensemble members (the type of connection does not matter)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,9 +103,6 @@ protected void setLayout() {
Stream predictionStream = this.builder.createStream(distributorP);
this.builder.connectInputKeyStream(predictionStream, classifier.getInputProcessor());

// distributorP.setOutputStream(testingStream);
// distributorP.setPredictionStream(predictionStream);

// Addition to Bagging: stream to train
/* The training stream. */
Stream trainingStream = this.builder.createStream(predictionCombinerP);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ private double[] getVotesForInstance(Instance instance) {
int numberOfRulesCovering = 0;

for (ActiveRule rule : ruleSet) {
if (rule.isCovering(instance) == true) {
if (rule.isCovering(instance)) {
numberOfRulesCovering++;
double[] vote = rule.getPrediction(instance);
double error = rule.getCurrentError();
Expand All @@ -179,9 +179,8 @@ private double[] getVotesForInstance(Instance instance) {
double error = defaultRule.getCurrentError();
errorWeightedVote.addVote(vote, error);
}
double[] weightedVote = errorWeightedVote.computeWeightedVote();

return weightedVote;
return errorWeightedVote.computeWeightedVote();
}

public ErrorWeightedVote newErrorWeightedVote() {
Expand Down Expand Up @@ -223,13 +222,13 @@ public void trainOnInstanceImpl(Instance instance) {
Iterator<ActiveRule> ruleIterator = this.ruleSet.iterator();
while (ruleIterator.hasNext()) {
ActiveRule rule = ruleIterator.next();
if (rule.isCovering(instance) == true) {
if (rule.isCovering(instance)) {
rulesCoveringInstance = true;
if (isAnomaly(instance, rule) == false) {
if (!isAnomaly(instance, rule)) {
// Update Change Detection Tests
double error = rule.computeError(instance); // Use adaptive mode error
boolean changeDetected = ((RuleActiveRegressionNode) rule.getLearningNode()).updateChangeDetection(error);
if (changeDetected == true) {
if (changeDetected) {
ruleIterator.remove();
} else {
rule.updateStatistics(instance);
Expand All @@ -245,10 +244,10 @@ public void trainOnInstanceImpl(Instance instance) {
}
}

if (rulesCoveringInstance == false) {
if (!rulesCoveringInstance) {
defaultRule.updateStatistics(instance);
if (defaultRule.getInstancesSeen() % this.gracePeriod == 0.0) {
if (defaultRule.tryToExpand(this.splitConfidence, this.tieThreshold) == true) {
if (defaultRule.tryToExpand(this.splitConfidence, this.tieThreshold)) {
ActiveRule newDefaultRule = newRule(defaultRule.getRuleNumberID(),
(RuleActiveRegressionNode) defaultRule.getLearningNode(),
((RuleActiveRegressionNode) defaultRule.getLearningNode()).getStatisticsOtherBranchSplit()); // other branch
Expand All @@ -274,7 +273,7 @@ private boolean isAnomaly(Instance instance, ActiveRule rule) {
// AMRules is equipped with anomaly detection. If on, compute the anomaly
// value.
boolean isAnomaly = false;
if (this.noAnomalyDetection == false) {
if (!this.noAnomalyDetection) {
if (rule.getInstancesSeen() >= this.anomalyNumInstThreshold) {
isAnomaly = rule.isAnomaly(instance,
this.univariateAnomalyprobabilityThreshold,
Expand Down Expand Up @@ -308,19 +307,19 @@ private ActiveRule newRule(int ID, RuleActiveRegressionNode node, double[] stati
}
}
}
if (statistics != null && ((RuleActiveRegressionNode) r.getLearningNode()).getTargetMean() != null)
if (statistics != null && (r.getLearningNode()).getTargetMean() != null)
{
double mean;
if (statistics[0] > 0) {
mean = statistics[1] / statistics[0];
((RuleActiveRegressionNode) r.getLearningNode()).getTargetMean().reset(mean, (long) statistics[0]);
(r.getLearningNode()).getTargetMean().reset(mean, (long) statistics[0]);
}
}
return r;
}

private ActiveRule newRule(int ID) {
ActiveRule r = new ActiveRule.Builder().
return new ActiveRule.Builder().
threshold(this.pageHinckleyThreshold).
alpha(this.pageHinckleyAlpha).
changeDetection(this.driftDetection).
Expand All @@ -329,7 +328,6 @@ private ActiveRule newRule(int ID) {
learningRatio(this.learningRatio).
numericObserver(numericObserver).
id(ID).build();
return r;
}

/*
Expand All @@ -342,7 +340,7 @@ public void onCreate(int id) {
this.ruleNumberID = 0;
this.defaultRule = newRule(++this.ruleNumberID);

this.ruleSet = new LinkedList<ActiveRule>();
this.ruleSet = new LinkedList<>();
}

/*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,6 @@ public class ActiveRule extends LearningRule {

private static final long serialVersionUID = 1L;

private double[] statisticsOtherBranchSplit;

private Builder builder;

private RuleActiveRegressionNode learningNode;
Expand Down Expand Up @@ -89,10 +87,6 @@ public void setLearningNode(RuleRegressionNode learningNode) {
this.learningNode = (RuleActiveRegressionNode) learningNode;
}

public double[] statisticsOtherBranchSplit() {
return this.statisticsOtherBranchSplit;
}

public RuleSplitNode getLastUpdatedRuleSplitNode() {
return this.lastUpdatedRuleSplitNode;
}
Expand All @@ -104,7 +98,6 @@ public static class Builder implements Serializable {

private static final long serialVersionUID = 1712887264918475622L;
protected boolean changeDetection;
protected boolean usePerceptron;
protected double threshold;
protected double alpha;
protected int predictionFunction;
Expand All @@ -115,8 +108,6 @@ public static class Builder implements Serializable {

protected FIMTDDNumericAttributeClassLimitObserver numericObserver;

protected double lastTargetMean;

public int id;

public Builder() {
Expand Down Expand Up @@ -182,8 +173,7 @@ public ActiveRule build() {
*/
public boolean tryToExpand(double splitConfidence, double tieThreshold) {

boolean shouldSplit = this.learningNode.tryToExpand(splitConfidence, tieThreshold);
return shouldSplit;
return this.learningNode.tryToExpand(splitConfidence, tieThreshold);

}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,10 +50,7 @@ public class AMRLearnerProcessor implements Processor {
*/
private static final long serialVersionUID = -2302897295090248013L;

private static final Logger logger =
LoggerFactory.getLogger(AMRLearnerProcessor.class);

private int processorId;
private static final Logger logger = LoggerFactory.getLogger(AMRLearnerProcessor.class);

private transient List<ActiveRule> ruleSet;

Expand Down Expand Up @@ -105,10 +102,10 @@ private void trainRuleOnInstance(int ruleID, Instance instance) {
ActiveRule rule = ruleIterator.next();
if (rule.getRuleNumberID() == ruleID) {
// Check (again) for coverage
if (rule.isCovering(instance) == true) {
if (rule.isCovering(instance)) {
double error = rule.computeError(instance); // Use adaptive mode error
boolean changeDetected = ((RuleActiveRegressionNode) rule.getLearningNode()).updateChangeDetection(error);
if (changeDetected == true) {
if (changeDetected) {
ruleIterator.remove();

this.sendRemoveRuleEvent(ruleID);
Expand Down Expand Up @@ -137,7 +134,7 @@ private boolean isAnomaly(Instance instance, LearningRule rule) {
// AMRules is equipped with anomaly detection. If on, compute the anomaly
// value.
boolean isAnomaly = false;
if (this.noAnomalyDetection == false) {
if (!this.noAnomalyDetection) {
if (rule.getInstancesSeen() >= this.anomalyNumInstThreshold) {
isAnomaly = rule.isAnomaly(instance,
this.univariateAnomalyprobabilityThreshold,
Expand Down Expand Up @@ -167,8 +164,7 @@ private boolean addRule(ActiveRule rule) {

@Override
public void onCreate(int id) {
this.processorId = id;
this.ruleSet = new LinkedList<ActiveRule>();
this.ruleSet = new LinkedList<>();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,7 @@ public class AMRulesStatisticsProcessor implements Processor {
*/
private static final long serialVersionUID = 5268933189695395573L;

private static final Logger logger =
LoggerFactory.getLogger(AMRulesStatisticsProcessor.class);

private int processorId;
private static final Logger logger = LoggerFactory.getLogger(AMRulesStatisticsProcessor.class);

private transient List<ActiveRule> ruleSet;

Expand Down Expand Up @@ -98,10 +95,10 @@ private void trainRuleOnInstance(int ruleID, Instance instance) {
if (rule.getRuleNumberID() == ruleID) {
// Check (again) for coverage
// Skip anomaly check as Aggregator's perceptron should be well-updated
if (rule.isCovering(instance) == true) {
if (rule.isCovering(instance)) {
double error = rule.computeError(instance); // Use adaptive mode error
boolean changeDetected = ((RuleActiveRegressionNode) rule.getLearningNode()).updateChangeDetection(error);
if (changeDetected == true) {
if (changeDetected) {
ruleIterator.remove();

this.sendRemoveRuleEvent(ruleID);
Expand Down Expand Up @@ -143,8 +140,7 @@ private boolean addRule(ActiveRule rule) {

@Override
public void onCreate(int id) {
this.processorId = id;
this.ruleSet = new LinkedList<ActiveRule>();
this.ruleSet = new LinkedList<>();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,7 @@ final public class LocalClustererProcessor implements Processor {
*
*/
private static final long serialVersionUID = -1577910988699148691L;
private static final Logger logger = LoggerFactory
.getLogger(LocalClustererProcessor.class);
private static final Logger logger = LoggerFactory.getLogger(LocalClustererProcessor.class);
private LocalClustererAdapter model;
private Stream outputStream;
private int modelId;
Expand Down Expand Up @@ -84,7 +83,7 @@ public LocalClustererAdapter getLearner() {
* Set the output streams.
*
* @param outputStream
* the new output stream {@link PredictionCombinerPE}.
* the new output stream {@link Stream}.
*/
public void setOutputStream(Stream outputStream) {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ public void init(TopologyBuilder builder, Instances dataset, int parallelism) {

protected void setLayout() {
learnerP = new LocalClustererProcessor();
LocalClustererAdapter learner = (LocalClustererAdapter) this.learnerOption.getValue();
LocalClustererAdapter learner = this.learnerOption.getValue();
learner.setDataset(this.dataset);
learnerP.setLearner(learner);

Expand All @@ -96,7 +96,6 @@ public Processor getInputProcessor() {
*/
@Override
public Set<Stream> getResultStreams() {
Set<Stream> streams = ImmutableSet.of(this.resultStream);
return streams;
return ImmutableSet.of(this.resultStream);
}
}
1 change: 0 additions & 1 deletion samoa-local/src/test/java/org/apache/samoa/AlgosTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
* #L%
*/

import org.apache.samoa.LocalDoTask;
import org.junit.Test;

public class AlgosTest {
Expand Down

0 comments on commit 77cea83

Please sign in to comment.