Changes from all commits (31 commits)
306a5d5
Update adding SSL tab and fast evaluation (capymoa support)
hmgomes Apr 17, 2024
39e8041
updating SOKNL in MOA to the newest version
YibinSun Apr 18, 2024
de78b4d
uploading prediction interval package
YibinSun Apr 19, 2024
4576b9e
SOKNL-test
YibinSun Apr 19, 2024
3aa82d4
Initial commit with Mini-Batch classes
cassales Apr 23, 2024
089a20f
update the windowed PI evaluator to get correct NMPIW results
YibinSun Apr 24, 2024
8bd3381
uploading prediction interval package
YibinSun Apr 19, 2024
3372a81
update the windowed PI evaluator to get correct NMPIW results
YibinSun Apr 24, 2024
bb97c0c
fix: add storing functionality in EfficientEvaluationLoops
May 20, 2024
7aa79a3
new version of the parallel ensembles with minibatch (has the reprodu…
cassales May 21, 2024
c5116a2
fix bug from OzaBag and OzaBagADWIN ensembles
cassales May 27, 2024
49f2c5c
Add instructions to build moa with dependencies
tachyonicClock May 27, 2024
9ed3759
RW_kNN Random
DwayneAcosta Jul 27, 2024
461902f
Revert "RW_kNN Random"
hmgomes Jul 28, 2024
5810b44
fix: fix the instance index for window regression evaluation
Jun 11, 2024
b7e407f
Commented out Random r variable
DwayneAcosta Jul 28, 2024
82ef7a1
Initial working version of PEARL
nuwangunasekara Aug 6, 2024
c44ebfa
set default parameters as per the paper
nuwangunasekara Aug 6, 2024
863a542
Add PEARL tests
nuwangunasekara Aug 6, 2024
e22b25d
Add PEARL tests
nuwangunasekara Aug 6, 2024
14a5c6c
Revert changes ARF tests
nuwangunasekara Aug 6, 2024
3f0a883
Change file name at the comment
nuwangunasekara Aug 6, 2024
d6a9e04
fix: removing a debug print in DBSCAN
cassales Aug 20, 2024
61fa080
add autoclass for autoML
marouabahri Sep 4, 2024
eb132c7
update to autoclass
hmgomes Sep 6, 2024
4cb7176
fix: update to make it compatible with capymoa
hmgomes Sep 7, 2024
e658b02
fix: debugging SRP_MB and TstThnTrn class
YibinSun Feb 18, 2025
f4c4524
Added PLASTIC and the necessary adaptations to existing classes (#23)
heymarco Mar 12, 2025
4124a31
remove '.DS_Store'
tachyonicClock Oct 2, 2025
23b838d
fix compiler errors
tachyonicClock Oct 3, 2025
2c8da79
fix storePredictions and storeY
tachyonicClock Oct 3, 2025
35 changes: 35 additions & 0 deletions .github/workflows/capymoa.yml
@@ -0,0 +1,35 @@
name: Package Jar for CapyMOA

on:
  push:
    branches: [ master, capymoa ]
  pull_request:
    branches: [ master, capymoa ]

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v4
    - name: Set up JDK 17
      uses: actions/setup-java@v4
      with:
        java-version: '17'
        distribution: 'temurin'
        cache: maven

    - name: Build with Maven
      working-directory: ./moa
      # no tests
      run: mvn -B package --file pom.xml -DskipTests

    # Upload jar file as artifact
    - name: Upload artifact
      uses: actions/upload-artifact@v4
      with:
        name: moa-jar
        path: ./moa/target/moa-*-jar-with-dependencies.jar
        if-no-files-found: error
        retention-days: 7
30 changes: 30 additions & 0 deletions .gitignore
@@ -3,4 +3,34 @@
*.iml
*~
*.bak
.DS_Store
.settings
.project

# MacOS folder metadata
.DS_Store

# Compiled class file
*.class

# Log file
*.log

# BlueJ files
*.ctxt

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar

# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
replay_pid*
24 changes: 24 additions & 0 deletions README.md
@@ -32,3 +32,27 @@ If you want to refer to MOA in a publication, please cite the following JMLR paper:
> MOA: Massive Online Analysis; Journal of Machine Learning Research 11: 1601-1604


## Building MOA for CapyMOA

You can now download the build artifact (the packaged jar) from the GitHub Actions workflow. **Make sure to unzip it!**

> These steps assume you have Java and Maven installed. If you don't have
> Maven installed, you can download it from
> [here](https://maven.apache.org/download.cgi). You can achieve the same
> outcome with IntelliJ IDEA by [building MOA with the IDE](https://moa.cms.waikato.ac.nz/tutorial-6-building-moa-from-the-source/) (the linked doc is a little out of date)
> and [packaging it as a single jar file](https://stackoverflow.com/questions/1082580/how-to-build-jars-from-intellij-idea-properly).

You can build MOA as a single jar file with all dependencies included by running the following commands from the repository root:
```bash
cd ./moa
mvn compile assembly:single
```

If the build succeeds, the jar will be written to a path like `moa/target/moa-2023.04.1-SNAPSHOT-jar-with-dependencies.jar` (the version in the file name may differ).

One way to verify that the jar file was built correctly is to run the following command:
```bash
java -jar ./moa/target/moa-2023.04.1-SNAPSHOT-jar-with-dependencies.jar
```
This should start the MOA GUI.
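
As an extra (optional) check, the jar can also be exercised without the GUI. The following is a minimal smoke-test sketch, not part of this repository: the class name, instance count, and jar path are illustrative. It runs a short test-then-train loop with a Hoeffding tree on a synthetic stream, using only classes bundled in the jar.

```java
import com.yahoo.labs.samoa.instances.Instance;
import moa.classifiers.trees.HoeffdingTree;
import moa.streams.generators.RandomTreeGenerator;

public class SmokeTest {
    public static void main(String[] args) {
        // Synthetic stream and learner, both loaded from the assembled jar.
        RandomTreeGenerator stream = new RandomTreeGenerator();
        stream.prepareForUse();

        HoeffdingTree learner = new HoeffdingTree();
        learner.setModelContext(stream.getHeader());
        learner.prepareForUse();

        // Test-then-train (prequential) loop.
        int correct = 0, total = 10000;
        for (int i = 0; i < total; i++) {
            Instance inst = stream.nextInstance().getData();
            if (learner.correctlyClassifies(inst)) {
                correct++;
            }
            learner.trainOnInstance(inst);
        }
        System.out.printf("Prequential accuracy over %d instances: %.2f%%%n",
                total, 100.0 * correct / total);
    }
}
```

Compile and run it with the assembled jar on the classpath, for example `javac -cp <path-to-jar> SmokeTest.java` followed by `java -cp .:<path-to-jar> SmokeTest` (use `;` instead of `:` on Windows).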

16 changes: 13 additions & 3 deletions moa/pom.xml
@@ -251,14 +251,24 @@
        <artifactId>license-maven-plugin</artifactId>
      </plugin>

      <plugin>
      <!-- TODO: Commented out because my build was failing -->
      <!-- <plugin>
        <groupId>com.github.fracpete</groupId>
        <artifactId>latex-maven-plugin</artifactId>
      </plugin>
      </plugin> -->

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <archive>
            <manifest>
              <mainClass>moa.gui.GUI</mainClass>
            </manifest>
          </archive>
          <descriptorRefs>
            <descriptorRef>jar-with-dependencies</descriptorRef>
          </descriptorRefs>
        </configuration>
      </plugin>

      <plugin>
21 changes: 20 additions & 1 deletion moa/src/main/java/moa/classifiers/AbstractClassifier.java
@@ -95,7 +95,6 @@ public void prepareForUseImpl(TaskMonitor monitor,
            resetLearning();
        }
    }

    @Override
    public double[] getVotesForInstance(Example<Instance> example){
@@ -105,6 +104,26 @@ public double[] getVotesForInstance(Example<Instance> example){
    @Override
    public abstract double[] getVotesForInstance(Instance inst);

    @Override
    public double getConfidenceForPrediction(Instance inst, double prediction) {
        double[] votes = this.getVotesForInstance(inst);
        double predictionValue = votes[(int) prediction];

        double sum = 0.0;
        for (double vote : votes)
            sum += vote;

        // Check if the sum is zero
        if (sum == 0.0)
            return 0.0; // Return 0 if sum is zero to avoid division by zero
        return predictionValue / sum;
    }

    @Override
    public double getConfidenceForPrediction(Example<Instance> example, double prediction) {
        return getConfidenceForPrediction(example.getData(), prediction);
    }

    @Override
    public Prediction getPredictionForInstance(Example<Instance> example){
        return getPredictionForInstance(example.getData());
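
For context, here is a hypothetical caller-side sketch of the new confidence API (not part of this PR; the learner, stream, and instance counts are illustrative). The idea: take the arg-max of the vote array as the prediction, then ask the classifier for its confidence, which this implementation defines as the predicted class's vote divided by the total vote mass (0 when all votes are zero).

```java
import com.yahoo.labs.samoa.instances.Instance;
import moa.classifiers.trees.HoeffdingTree;
import moa.streams.generators.RandomTreeGenerator;

public class ConfidenceSketch {
    public static void main(String[] args) {
        RandomTreeGenerator stream = new RandomTreeGenerator();
        stream.prepareForUse();

        HoeffdingTree learner = new HoeffdingTree();
        learner.setModelContext(stream.getHeader());
        learner.prepareForUse();

        // Train on a few instances so the votes are non-trivial.
        for (int i = 0; i < 1000; i++) {
            learner.trainOnInstance(stream.nextInstance().getData());
        }

        Instance inst = stream.nextInstance().getData();
        double[] votes = learner.getVotesForInstance(inst);

        // Arg-max over the vote array gives the predicted class index.
        int predicted = 0;
        for (int c = 1; c < votes.length; c++) {
            if (votes[c] > votes[predicted]) {
                predicted = c;
            }
        }

        // Normalized confidence: vote for the predicted class / sum of all votes.
        double confidence = learner.getConfidenceForPrediction(inst, predicted);
        System.out.println("Predicted class " + predicted + " with confidence " + confidence);
    }
}
```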
10 changes: 6 additions & 4 deletions moa/src/main/java/moa/classifiers/Classifier.java
@@ -15,7 +15,7 @@
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package moa.classifiers;

@@ -76,7 +76,7 @@ public interface Classifier extends Learner<Example<Instance>> {
     * test instance in each class
     */
    public double[] getVotesForInstance(Instance inst);

    /**
     * Sets the reference to the header of the data stream. The header of the
     * data stream is extended from WEKA
@@ -86,7 +86,7 @@ public interface Classifier extends Learner<Example<Instance>> {
     * @param ih the reference to the data stream header
     */
    //public void setModelContext(InstancesHeader ih);

    /**
     * Gets the reference to the header of the data stream. The header of the
     * data stream is extended from WEKA
@@ -96,6 +96,8 @@ public interface Classifier extends Learner<Example<Instance>> {
     * @return the reference to the data stream header
     */
    //public InstancesHeader getModelContext();

    public Prediction getPredictionForInstance(Instance inst);

    public double getConfidenceForPrediction(Instance inst, double prediction);
}
16 changes: 10 additions & 6 deletions moa/src/main/java/moa/classifiers/SemiSupervisedLearner.java
@@ -19,12 +19,16 @@
 */
package moa.classifiers;

import com.yahoo.labs.samoa.instances.Instance;
import moa.core.Example;
import moa.learners.Learner;

/**
 * Learner interface for incremental semi supervised models. It is used only in the GUI Regression Tab.
 *
 * @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
 * @version $Revision: 7 $
 * Updated learner interface for semi-supervised methods.
 */
public interface SemiSupervisedLearner {

public interface SemiSupervisedLearner extends Learner<Example<Instance>> {

    // Returns the pseudo-label used. If no pseudo-label was used, then return -1.
    int trainOnUnlabeledInstance(Instance instance);

    void addInitialWarmupTrainingInstances();
}
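
A minimal sketch of how a training loop might drive the updated contract, assuming a stream that mixes labeled and unlabeled instances. The helper class below is hypothetical and not part of this PR; warm-up handling via addInitialWarmupTrainingInstances is left to the concrete learner.

```java
import com.yahoo.labs.samoa.instances.Instance;
import moa.classifiers.SemiSupervisedLearner;
import moa.core.InstanceExample;
import moa.streams.InstanceStream;

public class SemiSupervisedLoopSketch {

    /** Routes labeled instances to the usual Learner API and unlabeled ones to the new method. */
    public static void run(SemiSupervisedLearner learner, InstanceStream stream) {
        while (stream.hasMoreInstances()) {
            Instance inst = stream.nextInstance().getData();
            if (inst.classIsMissing()) {
                // Returns the pseudo-label the learner used, or -1 if none was used.
                int pseudoLabel = learner.trainOnUnlabeledInstance(inst);
            } else {
                learner.trainOnInstance(new InstanceExample(inst));
            }
        }
    }
}
```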
moa/src/main/java/moa/classifiers/core/attributeclassobservers/GaussianNumericAttributeClassObserver.java
@@ -15,7 +15,7 @@
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package moa.classifiers.core.attributeclassobservers;

@@ -81,7 +81,7 @@ public void observeAttributeClass(double attVal, int classVal, double weight) {

    @Override
    public double probabilityOfAttributeValueGivenClass(double attVal,
            int classVal) {
        GaussianEstimator obs = this.attValDistPerClass.get(classVal);
        return obs != null ? obs.probabilityDensity(attVal) : 0.0;
    }
@@ -99,12 +99,24 @@ public AttributeSplitSuggestion getBestEvaluatedSplitSuggestion(
            if ((bestSuggestion == null) || (merit > bestSuggestion.merit)) {
                bestSuggestion = new AttributeSplitSuggestion(
                        new NumericAttributeBinaryTest(attIndex, splitValue,
                                true), postSplitDists, merit);
            }
        }
        return bestSuggestion;
    }

    /* Used by PLASTIC during restructuring when forcing a leaf split becomes necessary */
    public AttributeSplitSuggestion forceSplit(
            SplitCriterion criterion, double[] preSplitDist, int attIndex, double threshold) {
        AttributeSplitSuggestion bestSuggestion = null;
        double[][] postSplitDists = getClassDistsResultingFromBinarySplit(threshold);
        double merit = criterion.getMeritOfSplit(preSplitDist,
                postSplitDists);
        bestSuggestion = new AttributeSplitSuggestion(
                new NumericAttributeBinaryTest(attIndex, threshold, true), postSplitDists, merit);
        return bestSuggestion;
    }

    public double[] getSplitPointSuggestions() {
        Set<Double> suggestedSplitValues = new TreeSet<Double>();
        double minValue = Double.POSITIVE_INFINITY;
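
To illustrate the new hook on the numeric (Gaussian) observer, here is a hypothetical, standalone sketch of calling forceSplit directly; PLASTIC invokes it internally during subtree restructuring, and the observations, class distribution, attribute index, and threshold below are made-up values.

```java
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.attributeclassobservers.GaussianNumericAttributeClassObserver;
import moa.classifiers.core.splitcriteria.InfoGainSplitCriterion;

public class ForceSplitSketch {
    public static void main(String[] args) {
        GaussianNumericAttributeClassObserver obs = new GaussianNumericAttributeClassObserver();

        // Observe a few (attribute value, class, weight) triples for two classes.
        obs.observeAttributeClass(1.0, 0, 1.0);
        obs.observeAttributeClass(2.0, 0, 1.0);
        obs.observeAttributeClass(8.0, 1, 1.0);
        obs.observeAttributeClass(9.0, 1, 1.0);

        double[] preSplitDist = {2.0, 2.0}; // class counts before the split
        int attIndex = 0;                   // position of the attribute in the leaf
        double threshold = 5.0;             // forced split point

        AttributeSplitSuggestion suggestion =
                obs.forceSplit(new InfoGainSplitCriterion(), preSplitDist, attIndex, threshold);
        System.out.println("Merit of forced split at " + threshold + ": " + suggestion.merit);
    }
}
```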
moa/src/main/java/moa/classifiers/core/attributeclassobservers/NominalAttributeClassObserver.java
@@ -15,7 +15,7 @@
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package moa.classifiers.core.attributeclassobservers;

@@ -68,7 +68,7 @@ public void observeAttributeClass(double attVal, int classVal, double weight) {

    @Override
    public double probabilityOfAttributeValueGivenClass(double attVal,
            int classVal) {
        DoubleVector obs = this.attValDistPerClass.get(classVal);
        return obs != null ? (obs.getValue((int) attVal) + 1.0)
                / (obs.sumOfValues() + obs.numValues()) : 0.0;
@@ -109,6 +109,33 @@ public AttributeSplitSuggestion getBestEvaluatedSplitSuggestion(
        return bestSuggestion;
    }

    /* Used by PLASTIC during restructuring when forcing a leaf split becomes necessary */
    public AttributeSplitSuggestion forceSplit(
            SplitCriterion criterion, double[] preSplitDist, int attIndex, boolean binary, Double splitValue) {
        AttributeSplitSuggestion bestSuggestion;
        int maxAttValsObserved = getMaxAttValsObserved();
        if (!binary) {
            double[][] postSplitDists = getClassDistsResultingFromMultiwaySplit(maxAttValsObserved);
            double merit = criterion.getMeritOfSplit(preSplitDist,
                    postSplitDists);
            bestSuggestion = new AttributeSplitSuggestion(
                    new NominalAttributeMultiwayTest(attIndex), postSplitDists,
                    merit);
            return bestSuggestion;
        }
        assert splitValue != null : "Split value is null";
        if (splitValue >= maxAttValsObserved) {
            return null;
        }
        double[][] postSplitDists = getClassDistsResultingFromBinarySplit(splitValue.intValue());
        double merit = criterion.getMeritOfSplit(preSplitDist,
                postSplitDists);
        bestSuggestion = new AttributeSplitSuggestion(
                new NominalAttributeBinaryTest(attIndex, splitValue.intValue()),
                postSplitDists, merit);
        return bestSuggestion;
    }

    public int getMaxAttValsObserved() {
        int maxAttValsObserved = 0;
        for (DoubleVector attValDist : this.attValDistPerClass) {
@@ -157,7 +184,7 @@ public double[][] getClassDistsResultingFromBinarySplit(int valIndex) {
            }
        }
        return new double[][]{equalsDist.getArrayRef(),
                notEqualDist.getArrayRef()};
    }

    @Override
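
Similarly, a hypothetical sketch of the nominal variant: binary = false forces a multiway split over all observed values, while binary = true forces an equals/not-equals split on the given value and returns null if that value was never observed. All inputs below are illustrative.

```java
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.attributeclassobservers.NominalAttributeClassObserver;
import moa.classifiers.core.splitcriteria.InfoGainSplitCriterion;

public class NominalForceSplitSketch {
    public static void main(String[] args) {
        NominalAttributeClassObserver obs = new NominalAttributeClassObserver();

        // Attribute value 0 observed for class 0, value 1 for class 1.
        obs.observeAttributeClass(0.0, 0, 1.0);
        obs.observeAttributeClass(0.0, 0, 1.0);
        obs.observeAttributeClass(1.0, 1, 1.0);
        obs.observeAttributeClass(1.0, 1, 1.0);

        double[] preSplitDist = {2.0, 2.0};
        InfoGainSplitCriterion criterion = new InfoGainSplitCriterion();

        // Forced multiway split over all observed attribute values.
        AttributeSplitSuggestion multiway = obs.forceSplit(criterion, preSplitDist, 0, false, null);

        // Forced binary (equals/not-equals) split on value 1.
        AttributeSplitSuggestion binary = obs.forceSplit(criterion, preSplitDist, 0, true, 1.0);

        System.out.println("Multiway merit: " + multiway.merit);
        System.out.println("Binary merit: " + (binary == null ? "n/a" : Double.toString(binary.merit)));
    }
}
```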
moa/src/main/java/moa/classifiers/core/conditionaltests/NominalAttributeBinaryTest.java
@@ -15,7 +15,7 @@
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package moa.classifiers.core.conditionaltests;

@@ -48,6 +48,10 @@ public int branchForInstance(Instance inst) {
        return inst.isMissing(instAttIndex) ? -1 : ((int) inst.value(instAttIndex) == this.attValue ? 0 : 1);
    }

    public double getValue() {
        return attValue;
    }

    @Override
    public String describeConditionForBranch(int branch, InstancesHeader context) {
        if ((branch == 0) || (branch == 1)) {
moa/src/main/java/moa/classifiers/core/conditionaltests/NumericAttributeBinaryTest.java
@@ -15,7 +15,7 @@
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package moa.classifiers.core.conditionaltests;

@@ -39,16 +39,20 @@ public class NumericAttributeBinaryTest extends InstanceConditionalBinaryTest {
    protected boolean equalsPassesTest;

    public NumericAttributeBinaryTest(int attIndex, double attValue,
            boolean equalsPassesTest) {
        this.attIndex = attIndex;
        this.attValue = attValue;
        this.equalsPassesTest = equalsPassesTest;
    }

    public double getValue() {
        return attValue;
    }

    @Override
    public int branchForInstance(Instance inst) {
        int instAttIndex = this.attIndex ; // < inst.classIndex() ? this.attIndex
                                           // : this.attIndex + 1;
        if (inst.isMissing(instAttIndex)) {
            return -1;
        }