diff --git a/assembly/dependencies-apache-ignite-slim.xml b/assembly/dependencies-apache-ignite-slim.xml
index 96b28f8b2e89d4..e98695c8a0c859 100644
--- a/assembly/dependencies-apache-ignite-slim.xml
+++ b/assembly/dependencies-apache-ignite-slim.xml
@@ -145,20 +145,17 @@
org.apache.ignite:ignite-aoporg.apache.ignite:ignite-aws
- org.apache.ignite:ignite-camelorg.apache.ignite:ignite-cassandra-serializersorg.apache.ignite:ignite-cassandra-storeorg.apache.ignite:ignite-cloudorg.apache.ignite:ignite-direct-ioorg.apache.ignite:ignite-gceorg.apache.ignite:ignite-jcl
- org.apache.ignite:ignite-jms11org.apache.ignite:ignite-mesosorg.apache.ignite:ignite-mlorg.apache.ignite:ignite-ml-h2o-model-parserorg.apache.ignite:ignite-ml-spark-model-parserorg.apache.ignite:ignite-ml-xgboost-model-parser
- org.apache.ignite:ignite-mqttorg.apache.ignite:ignite-osgiorg.apache.ignite:ignite-osgi-karaforg.apache.ignite:ignite-osgi-paxlogging
@@ -167,7 +164,6 @@
org.apache.ignite:ignite-sparkorg.apache.ignite:ignite-spark-2.4org.apache.ignite:ignite-ssh
- org.apache.ignite:ignite-stormorg.apache.ignite:ignite-weborg.apache.ignite:ignite-yarnorg.apache.ignite:ignite-zookeeper
diff --git a/docs/README.adoc b/docs/README.adoc
index 856b993f9f45cc..710f7847fc6785 100644
--- a/docs/README.adoc
+++ b/docs/README.adoc
@@ -68,6 +68,46 @@ $ docker run -v "$PWD:/srv/jekyll" -p 4000:4000 jekyll/jekyll:latest jekyll s
Open `http://localhost:4000/docs[window=_blank]` in your browser.
+=== Troubleshooting
+
+Below are some issues you might hit during an installation of the Jekyll environment or while building the tutorials.
+Let us know if you come across a new issue and have found a workaround for it.
+
+==== MacOS: Issues with FFI library during Jekyll installation
+
+You should see an error trace similar to this: https://github.com/ffi/ffi/issues/653
+
+Attempt to fix the problem by following this sequence of commands (typically it's the last command only):
+
+[source, text]
+----
+brew reinstall libffi
+export LDFLAGS="-L/usr/local/opt/libffi/lib"
+export CPPFLAGS="-I/usr/local/opt/libffi/include"
+export PKG_CONFIG_PATH="/usr/local/opt/libffi/lib/pkgconfig"
+gem install --user-install bundler jekyll
+----
+
+==== MacOS: jekyll-asciidoc gem is not installed by default
+
+Try to follow this procedure to fix the issue.
+
+* Comment out the `rm -rf $tmp_dir` at the very end of the `build.sh` script, so that the temp folder is not deleted after the execution.
+* Run `build.sh` (fails with `Could not find gem 'jekyll-asciidoc'...` error).
+* Go to `tmp/web_site` folder.
+* Run `bundle install`.
+* Revert the `build.sh` script and run it again.
+
+==== MacOS: can't build project due to inability to load openssl
+
+You should see an error like this:
+
+`LoadError: dlopen(/Users/dmagda/.rbenv/versions/2.6.2/lib/ruby/2.6.0/x86_64-darwin18/digest/sha1.bundle, 9): Library not loaded: /usr/local/opt/openssl/lib/libssl.1.0.0.dylib
+ Referenced from: /Users/dmagda/.rbenv/versions/2.6.2/lib/ruby/2.6.0/x86_64-darwin18/digest/sha1.bundle`
+
+Try to upgrade Ruby and rbenv to the latest versions (for example, Ruby 2.7.1) and then reinstall Jekyll. Use the official instructions:
+https://jekyllrb.com/docs/installation/
+
== How to Contribute
If you want to contribute to the documentation, add or modify the relevant page in the `docs/_docs` directory.
diff --git a/docs/_docs/clustering/network-configuration.adoc b/docs/_docs/clustering/network-configuration.adoc
index d656b0c8040c88..8d47b60a853983 100644
--- a/docs/_docs/clustering/network-configuration.adoc
+++ b/docs/_docs/clustering/network-configuration.adoc
@@ -49,13 +49,26 @@ tab:C++[unsupported]
The following table describes some most important properties of `TcpDiscoverySpi`.
You can find the complete list of properties in the javadoc:org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi[] javadoc.
+[CAUTION]
+====
+You should initialize the `IgniteConfiguration.localHost` or `TcpDiscoverySpi.localAddress` parameter with the network
+interface that will be used for inter-node communication. By default, a node binds to and listens on all available IP
+addresses of an environment it's running on. It can prolong the detection of node failures if some of the node's addresses are
+not reachable from other cluster nodes.
+====
+
[cols="1,2,1",opts="header"]
|===
|Property | Description| Default Value
-| `localAddress`| Local host IP address used for discovery. | By default, the node uses the first non-loopback address it finds. If there is no non-loopback address available, then `java.net.InetAddress.getLocalHost()` is used.
+| `localAddress`| Local host IP address used for discovery. If set, overrides the `IgniteConfiguration.localHost` setting. | By default, a node binds to all available network addresses. If there is no non-loopback address available, then `java.net.InetAddress.getLocalHost()` is used.
| `localPort` | The port that the node binds to. If set to a non-default value, other cluster nodes must know this port to be able to discover the node. | `47500`
| `localPortRange`| If the `localPort` is busy, the node attempts to bind to the next port (incremented by 1) and continues this process until it finds a free port. The `localPortRange` property defines the number of ports the node will try (starting from `localPort`).
| `100`
+| `soLinger`| Specifies a linger-on-close timeout of TCP sockets used by Discovery SPI. See Java `Socket.setSoLinger` API
+for details on how to adjust this setting. In Ignite, the timeout defaults to a non-negative value to prevent
+link:https://bugs.openjdk.java.net/browse/JDK-8219658[potential deadlocks with SSL connections, window=_blank] but,
+as a side effect, this can prolong the detection of cluster node failures. Alternatively, update your JRE version to the
+one with the SSL issue fixed and adjust this setting accordingly. | `0`
| `reconnectCount` | The number of times the node tries to (re)establish connection to another node. |`10`
| `networkTimeout` | The maximum network timeout in milliseconds for network operations. |`5000`
| `socketTimeout` | The socket operations timeout. This timeout is used to limit connection time and write-to-socket time. |`5000`
@@ -110,7 +123,7 @@ You can find the list of all properties in the javadoc:org.apache.ignite.spi.com
[cols="1,2,1",opts="header"]
|===
|Property | Description| Default Value
-| `localAddress` | The local address for the communication SPI to bind to. |
+| `localAddress` | The local address for the communication SPI to bind to. If set, overrides the `IgniteConfiguration.localHost` setting. |
| `localPort` | The local port that the node uses for communication. | `47100`
diff --git a/docs/_docs/extensions-and-integrations/spring/spring-data.adoc b/docs/_docs/extensions-and-integrations/spring/spring-data.adoc
index ece798bd92b7df..8216a591bde990 100644
--- a/docs/_docs/extensions-and-integrations/spring/spring-data.adoc
+++ b/docs/_docs/extensions-and-integrations/spring/spring-data.adoc
@@ -32,12 +32,18 @@ tab:pom.xml[]
----
org.apache.ignite
- ignite-spring-data
+ ignite-spring-data_2.2{ignite.version}
----
--
+[NOTE]
+====
+If your Spring Data version is earlier than Spring Data 2.2, then set `ignite-spring-data_2.0`
+or `ignite-spring-data` as an `artifactId` in the pom.xml configuration.
+====
+
== Apache Ignite Repository
Apache Ignite introduces a special `IgniteRepository` interface that extends default `CrudRepository`. This interface
@@ -220,7 +226,7 @@ System.out.println("\n>>> Top Person with surname 'Smith': " +
== Example
-The complete example is available on link:{githubUrl}/examples/src/main/java/org/apache/ignite/examples/springdata[GitHub, window=_blank].
+The complete example is available on link:https://github.com/apache/ignite-extensions/tree/master/modules/spring-data-2.0-ext/examples/main[GitHub, window=_blank].
== Tutorial
diff --git a/docs/_docs/extensions-and-integrations/streaming/camel-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/camel-streamer.adoc
index 8734d8133e95dc..a42129383c68df 100644
--- a/docs/_docs/extensions-and-integrations/streaming/camel-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/camel-streamer.adoc
@@ -43,7 +43,7 @@ to interact with Ignite Caches, Compute, Events, Messaging, etc. from within a C
== Maven Dependency
-To make use of the `ignite-camel` streamer, you need to add the following dependency:
+To make use of the `ignite-camel-ext` streamer, you need to add the following dependency:
[tabs]
--
@@ -52,8 +52,8 @@ tab:pom.xml[]
----
org.apache.ignite
- ignite-camel
- ${ignite.version}
+ ignite-camel-ext
+ ${ignite-camel-ext.version}
----
--
diff --git a/docs/_docs/extensions-and-integrations/streaming/flink-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/flink-streamer.adoc
index f25ed05dce9b27..92ab398167d670 100644
--- a/docs/_docs/extensions-and-integrations/streaming/flink-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/flink-streamer.adoc
@@ -21,7 +21,7 @@ Starting data transfer to Ignite cache can be done with the following steps.
. Import Ignite Flink Sink Module in Maven Project
If you are using Maven to manage dependencies of your project, you can add Flink module
-dependency like this (replace `${ignite.version}` with actual Ignite version you are
+dependency like this (replace `${ignite-flink-ext.version}` with actual Ignite Flink Extension version you are
interested in):
+
[tabs]
@@ -38,8 +38,8 @@ tab:pom.xml[]
...
org.apache.ignite
- ignite-flink
- ${ignite.version}
+ ignite-flink-ext
+ ${ignite-flink-ext.version}
...
diff --git a/docs/_docs/extensions-and-integrations/streaming/flume-sink.adoc b/docs/_docs/extensions-and-integrations/streaming/flume-sink.adoc
index 97a741df8add71..3697c7cf08e182 100644
--- a/docs/_docs/extensions-and-integrations/streaming/flume-sink.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/flume-sink.adoc
@@ -41,7 +41,7 @@ plugins.d/
`-- libext
|-- cache-api-1.0.0.jar
|-- ignite-core-x.x.x.jar
- |-- ignite-flume-x.x.x.jar <-- IgniteSink
+ |-- ignite-flume-ext-x.x.x.jar <-- IgniteSink
|-- ignite-spring-x.x.x.jar
|-- spring-aop-4.1.0.RELEASE.jar
|-- spring-beans-4.1.0.RELEASE.jar
diff --git a/docs/_docs/extensions-and-integrations/streaming/jms-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/jms-streamer.adoc
index 5c7c883ec6e692..b3f9be9864bb74 100644
--- a/docs/_docs/extensions-and-integrations/streaming/jms-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/jms-streamer.adoc
@@ -116,8 +116,8 @@ tab:pom.xml[]
----
org.apache.ignite
- ignite-jms11
- ${ignite.version}
+ ignite-jms11-ext
+ ${ignite-jms11-ext.version}
----
--
diff --git a/docs/_docs/extensions-and-integrations/streaming/kafka-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/kafka-streamer.adoc
index f00946a78d62e8..a45fa4d792f8f2 100644
--- a/docs/_docs/extensions-and-integrations/streaming/kafka-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/kafka-streamer.adoc
@@ -153,7 +153,7 @@ http://node1:8080/ignite?cmd=size&cacheName=cache1
== Streaming data with Ignite Kafka Streamer Module
If you are using Maven to manage dependencies of your project, first of all you will have to add Kafka Streamer module
-dependency like this (replace `${ignite.version}` with actual Ignite version you are interested in):
+dependency like this (replace `${ignite-kafka-ext.version}` with actual Ignite Kafka Extension version you are interested in):
[tabs]
--
@@ -169,8 +169,8 @@ tab:pom.xml[]
...
org.apache.ignite
- ignite-kafka
- ${ignite.version}
+ ignite-kafka-ext
+ ${ignite-kafka-ext.version}
...
diff --git a/docs/_docs/extensions-and-integrations/streaming/mqtt-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/mqtt-streamer.adoc
index f7ec04cf8c3060..1339c97fce2685 100644
--- a/docs/_docs/extensions-and-integrations/streaming/mqtt-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/mqtt-streamer.adoc
@@ -73,4 +73,4 @@ streamer.start();
----
--
-Refer to the Javadocs of the `ignite-mqtt` module for more info on the available options.
+Refer to the Javadocs of the `ignite-mqtt-ext` module for more info on the available options.
diff --git a/docs/_docs/extensions-and-integrations/streaming/rocketmq-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/rocketmq-streamer.adoc
index 4f7dcbb9e5854b..a302ca722c4b22 100644
--- a/docs/_docs/extensions-and-integrations/streaming/rocketmq-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/rocketmq-streamer.adoc
@@ -20,7 +20,7 @@ to Ignite.
To use Ignite RocketMQ Streamer module
. Import it to your Maven project. If you are using Maven to manage dependencies of your project, you can add an Ignite
-RocketMQ module dependency like this (replace `${ignite.version}` with actual Ignite version you are interested in):
+RocketMQ module dependency like this (replace `${ignite-rocketmq-ext.version}` with actual Ignite RocketMQ Extension version you are interested in):
+
[tabs]
--
@@ -36,8 +36,8 @@ tab:pom.xml[]
...
org.apache.ignite
- ignite-rocketmq
- ${ignite.version}
+ ignite-rocketmq-ext
+ ${ignite-rocketmq-ext.version}
...
diff --git a/docs/_docs/extensions-and-integrations/streaming/storm-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/storm-streamer.adoc
index e6871b79807753..887712e1058fd1 100644
--- a/docs/_docs/extensions-and-integrations/streaming/storm-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/storm-streamer.adoc
@@ -19,7 +19,7 @@ Apache Ignite Storm Streamer module provides streaming via http://storm.apache.o
Starting data transfer to Ignite can be done with the following steps.
. Import Ignite Storm Streamer Module In Maven Project. If you are using Maven to manage dependencies of your project,
-you can add Storm module dependency like this (replace `${ignite.version}` with actual Ignite version you are interested in):
+you can add Storm module dependency like this (replace `${ignite-storm-ext.version}` with actual Ignite Storm Extension version you are interested in):
+
[tabs]
--
@@ -35,8 +35,8 @@ tab:pom.xml[]
...
org.apache.ignite
- ignite-storm
- ${ignite.version}
+ ignite-storm-ext
+ ${ignite-storm-ext.version}
...
diff --git a/docs/_docs/extensions-and-integrations/streaming/twitter-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/twitter-streamer.adoc
index 8c6e65737f6037..4f47c60f250632 100644
--- a/docs/_docs/extensions-and-integrations/streaming/twitter-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/twitter-streamer.adoc
@@ -18,7 +18,7 @@ Ignite Twitter Streamer module consumes tweets from Twitter and feeds the transf
To stream data from Twitter into Ignite, you need to:
-. Import Ignite Twitter Module with Maven and replace `${ignite.version}` with the actual Ignite version you are interested in.
+. Import Ignite Twitter Module with Maven and replace `${ignite-twitter-ext.version}` with the actual Ignite Twitter Extension version you are interested in.
+
[tabs]
--
@@ -27,8 +27,8 @@ tab:pom.xml[]
----
org.apache.ignite
- ignite-twitter
- ${ignite.version}
+ ignite-twitter-ext
+ ${ignite-twitter-ext.version}
----
--
diff --git a/docs/_docs/extensions-and-integrations/streaming/zeromq-streamer.adoc b/docs/_docs/extensions-and-integrations/streaming/zeromq-streamer.adoc
index 9432624be1cfa4..918c0e827f94ea 100644
--- a/docs/_docs/extensions-and-integrations/streaming/zeromq-streamer.adoc
+++ b/docs/_docs/extensions-and-integrations/streaming/zeromq-streamer.adoc
@@ -29,8 +29,8 @@ tab:pom.xml[]
...
org.apache.ignite
- ignite-zeromq
- ${ignite.version}
+ ignite-zeromq-ext
+ ${ignite-zeromq-ext.version}
...
diff --git a/docs/_docs/machine-learning/binary-classification/decision-trees.adoc b/docs/_docs/machine-learning/binary-classification/decision-trees.adoc
index 57ab7bf21b1d14..bc9ff058402ced 100644
--- a/docs/_docs/machine-learning/binary-classification/decision-trees.adoc
+++ b/docs/_docs/machine-learning/binary-classification/decision-trees.adoc
@@ -39,12 +39,12 @@ The model works this way - the split process stops when either the algorithm has
== Model
-The Model in a decision tree classification is represented by the class `DecisionTreeNode`. We can make a prediction for a given vector of features in the following way:
+The Model in a decision tree classification is represented by the class `DecisionTreeModel`. We can make a prediction for a given vector of features in the following way:
[source, java]
----
-DecisionTreeNode mdl = ...;
+DecisionTreeModel mdl = ...;
double prediction = mdl.apply(observation);
----
@@ -68,7 +68,7 @@ DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTraine
);
// Train model.
-DecisionTreeNode mdl = trainer.fit(ignite, dataCache, vectorizer);
+DecisionTreeModel mdl = trainer.fit(ignite, dataCache, vectorizer);
----
diff --git a/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc b/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc
index 92992f87200ca1..065cb78d35bcbc 100644
--- a/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc
+++ b/docs/_docs/machine-learning/importing-model/model-import-from-apache-spark.adoc
@@ -71,7 +71,7 @@ To load in Ignite ML you should use SparkModelParser class via method parse() ca
[source, java]
----
-DecisionTreeNode mdl = (DecisionTreeNode)SparkModelParser.parse(
+DecisionTreeModel mdl = (DecisionTreeModel)SparkModelParser.parse(
SPARK_MDL_PATH,
SupportedSparkModels.DECISION_TREE
);
diff --git a/docs/_docs/machine-learning/model-selection/cross-validation.adoc b/docs/_docs/machine-learning/model-selection/cross-validation.adoc
index 8e64c68e67f4ec..39e00f1a5c6f31 100644
--- a/docs/_docs/machine-learning/model-selection/cross-validation.adoc
+++ b/docs/_docs/machine-learning/model-selection/cross-validation.adoc
@@ -27,7 +27,7 @@ Let’s imagine that we have a trainer, a training set and we want to make cross
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
// Create cross-validation instance
-CrossValidation scoreCalculator
+CrossValidation scoreCalculator
= new CrossValidation<>();
// Set up the cross-validation process
@@ -67,7 +67,7 @@ Pipeline pipeline
// Create cross-validation instance
-CrossValidation scoreCalculator
+CrossValidation scoreCalculator
= new CrossValidation<>();
// Set up the cross-validation process
diff --git a/docs/_docs/machine-learning/model-selection/pipeline-api.adoc b/docs/_docs/machine-learning/model-selection/pipeline-api.adoc
index 7f0cb93e3bcba9..9b2798c25865d7 100644
--- a/docs/_docs/machine-learning/model-selection/pipeline-api.adoc
+++ b/docs/_docs/machine-learning/model-selection/pipeline-api.adoc
@@ -64,7 +64,7 @@ Preprocessor normalizationPreprocessor = new NormalizationTrain
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
-CrossValidation scoreCalculator = new CrossValidation<>();
+CrossValidation scoreCalculator = new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
.addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] {1.0, 2.0, 3.0, 4.0, 5.0, 10.0})
@@ -101,7 +101,7 @@ Pipeline pipeline = new Pipeline())
.addTrainer(trainer);
-CrossValidation scoreCalculator = new CrossValidation<>();
+CrossValidation scoreCalculator = new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
.addHyperParam("maxDeep", trainer::withMaxDeep, new Double[] {1.0, 2.0, 3.0, 4.0, 5.0, 10.0})
diff --git a/docs/_docs/machine-learning/regression/decision-trees-regression.adoc b/docs/_docs/machine-learning/regression/decision-trees-regression.adoc
index 48f9d5cc289e22..2abbaa8dc71ddc 100644
--- a/docs/_docs/machine-learning/regression/decision-trees-regression.adoc
+++ b/docs/_docs/machine-learning/regression/decision-trees-regression.adoc
@@ -39,12 +39,12 @@ The model works this way - the split process stops when either the algorithm has
== Model
-The Model in a decision tree classification is represented by the class `DecisionTreeNode`. We can make a prediction for a given vector of features in the following way:
+The Model in a decision tree classification is represented by the class `DecisionTreeModel`. We can make a prediction for a given vector of features in the following way:
[source, java]
----
-DecisionTreeNode mdl = ...;
+DecisionTreeModel mdl = ...;
double prediction = mdl.apply(observation);
----
@@ -67,7 +67,7 @@ DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(
);
// Train model.
-DecisionTreeNode mdl = trainer.fit(ignite, dataCache, vectorizer);
+DecisionTreeModel mdl = trainer.fit(ignite, dataCache, vectorizer);
----
== Examples
diff --git a/docs/_docs/monitoring-metrics/system-views.adoc b/docs/_docs/monitoring-metrics/system-views.adoc
index 92b713ff1ff118..1d400c6681800c 100644
--- a/docs/_docs/monitoring-metrics/system-views.adoc
+++ b/docs/_docs/monitoring-metrics/system-views.adoc
@@ -32,7 +32,7 @@ See the link:SQL/schemas[Understanding Schemas] page for the information on how
== Querying System Views
-To query the system views using the link:sqlline[SQLLine] tool, connect to the SYS schema as follows:
+To query the system views using the link:tools/sqlline[SQLLine] tool, connect to the SYS schema as follows:
[source, shell]
----
diff --git a/docs/_docs/quick-start/sql.adoc b/docs/_docs/quick-start/sql.adoc
index 7d1c3dfe829344..c1d1eed2e13f5a 100644
--- a/docs/_docs/quick-start/sql.adoc
+++ b/docs/_docs/quick-start/sql.adoc
@@ -126,4 +126,4 @@ Easy!
From here, you may want to:
* Read more about using Ignite and link:SQL/sql-introduction[SQL]
-* Read more about using link:sqlline[sqlline]
+* Read more about using link:tools/sqlline[sqlline]
diff --git a/docs/_docs/security/ssl-tls.adoc b/docs/_docs/security/ssl-tls.adoc
index bf5a90e2bfd1d8..b56b2094d586cf 100644
--- a/docs/_docs/security/ssl-tls.adoc
+++ b/docs/_docs/security/ssl-tls.adoc
@@ -32,6 +32,14 @@ To enable SSL/TLS for cluster nodes, configure an `SSLContext` factory in the no
You can use the `org.apache.ignite.ssl.SslContextFactory`, which is the default factory that uses a configurable keystore to initialize the SSL context.
//You can also implement your own `SSLContext` factory.
+[CAUTION]
+====
+Ensure that your version of the JVM addresses
+link:https://bugs.openjdk.java.net/browse/JDK-8219658[the following issue, window=_blank] that can cause deadlocks
+in SSL connections. If your JVM is affected but can't be updated, then set
+the link:clustering/network-configuration[`TcpDiscoverySpi.soLinger`] parameter to a non-negative value.
+====
+
Below is an example of `SslContextFactory` configuration:
[tabs]
diff --git a/docs/_docs/sql-reference/operational-commands.adoc b/docs/_docs/sql-reference/operational-commands.adoc
index f5dea2254ce6d6..be7223f6bb51fe 100644
--- a/docs/_docs/sql-reference/operational-commands.adoc
+++ b/docs/_docs/sql-reference/operational-commands.adoc
@@ -115,7 +115,7 @@ While streaming mode allows you to load data much faster than other data loading
2. Due to streaming mode's asynchronous nature, you cannot know update counts for every statement executed; all JDBC/ODBC commands returning update counts will return 0.
=== Example
-As an example, you can use the sample world.sql file that is shipped with the latest Ignite distribution. It can be found in the `{IGNITE_HOME}/examples/sql/` directory. You can use the `run` command from link:sqlline[SQLLine, window=_blank], as shown below:
+As an example, you can use the sample world.sql file that is shipped with the latest Ignite distribution. It can be found in the `{IGNITE_HOME}/examples/sql/` directory. You can use the `run` command from link:tools/sqlline[SQLLine, window=_blank], as shown below:
[source,shell]
----
diff --git a/docs/_docs/thin-client-comparison.csv b/docs/_docs/thin-client-comparison.csv
index ee2fe80ab1c5cb..232518383cda93 100644
--- a/docs/_docs/thin-client-comparison.csv
+++ b/docs/_docs/thin-client-comparison.csv
@@ -7,9 +7,10 @@ Async Operations,No,{yes},No,{yes},{yes},{yes}
SSL/TLS,{yes},{yes},{yes},{yes},{yes},{yes}
Authentication,{yes},{yes},{yes},{yes},{yes},{yes}
Partition Awareness,{yes},{yes},{yes},{yes},{yes},No
-Failover,{yes},No,{yes},{yes},{yes},{yes}
-Transactions,{yes},No,No,No,No,No
+Failover,{yes},{yes},{yes},{yes},{yes},{yes}
+Transactions,{yes},{yes},No,No,No,No
Cluster API,{yes},{yes},No,No,No,No
-Cluster discovery,No,{yes},No,No,No,No
Compute,{yes},{yes},No,No,No,No
-Service invocation,{yes},No,No,No,No,No
\ No newline at end of file
+Service invocation,{yes},{yes},No,No,No,No
+Server Discovery,No,{yes},No,No,No,No
+Server Discovery in Kubernetes,{yes},No,No,No,No,No
\ No newline at end of file
diff --git a/docs/_docs/thin-clients/getting-started-with-thin-clients.adoc b/docs/_docs/thin-clients/getting-started-with-thin-clients.adoc
index 7860bf49491de0..5e0c37ccd26603 100644
--- a/docs/_docs/thin-clients/getting-started-with-thin-clients.adoc
+++ b/docs/_docs/thin-clients/getting-started-with-thin-clients.adoc
@@ -46,7 +46,7 @@ include::thin-client-comparison.csv[]
=== Client Connection Failover
-All thin clients (except for the .NET thin client) support a connection failover mechanism, whereby the client automatically switches to an available node in case of the current node or connection failure.
+All thin clients support a connection failover mechanism, whereby the client automatically switches to an available node in case of the current node or connection failure.
For this mechanism to work, you need to provide a list of node addresses you want to use for failover purposes in the client configuration.
Refer to the specific client documentation for more details.
diff --git a/examples/pom.xml b/examples/pom.xml
index 388a78ad029b52..25a5b87852a8b3 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -106,6 +106,12 @@
org.apache.igniteignite-ml${project.version}
+
+
+ com.fasterxml.jackson.core
+ *
+
+
@@ -248,6 +254,16 @@
net.alchim31.mavenscala-maven-plugin
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+
+ ${spark.folder}
+ ${spark.test.folder}
+
+
+
@@ -279,6 +295,21 @@
${project.version}
+
+
+
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+
+ ${lgpl.folder}
+ ${lgpl.test.folder}
+
+
+
+
+ spark-2.4
@@ -321,6 +352,16 @@
net.alchim31.mavenscala-maven-plugin
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+
+ ${spark.folder}
+ ${spark.test.folder}
+
+
+
diff --git a/examples/src/main/java-lgpl/org/apache/ignite/examples/misc/schedule/ComputeScheduleExample.java b/examples/src/main/java-lgpl/org/apache/ignite/examples/misc/schedule/ComputeScheduleExample.java
index d480309b84bfb4..c58de3bc4b10d1 100644
--- a/examples/src/main/java-lgpl/org/apache/ignite/examples/misc/schedule/ComputeScheduleExample.java
+++ b/examples/src/main/java-lgpl/org/apache/ignite/examples/misc/schedule/ComputeScheduleExample.java
@@ -29,6 +29,9 @@
* Demonstrates a cron-based {@link Runnable} execution scheduling.
* Test runnable object broadcasts a phrase to all cluster nodes every minute
* three times with initial scheduling delay equal to five seconds.
+ * This example uses an Ignite extension to Cron syntax,
+ * which can be used to specify an initial delay in seconds and a number of runs.
+ * https://apacheignite.readme.io/docs/cron-based-scheduling#syntax-extension
*
* Remote nodes should always be started with special configuration file which
* enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.
@@ -68,12 +71,19 @@ public static void main(String[] args) throws IgniteException {
return invocations;
}
},
- "{9, 5, 3} * * * * *" // Cron expression.
+ // Callable object broadcasts a phrase to all cluster nodes every minute
+ // three times with initial scheduling delay equal to five seconds.
+ // https://apacheignite.readme.io/docs/cron-based-scheduling#syntax-extension
+ "{5, 3} * * * * *" // Cron expression.
);
while (!fut.isDone())
System.out.println(">>> Invocation #: " + fut.get());
+ // In case the Cron expression is invalid, SchedulerFuture will be immediately completed with an error,
+ // that provides additional details.
+ fut.get();
+
System.out.println();
System.out.println(">>> Schedule future is done and has been unscheduled.");
System.out.println(">>> Check all nodes for hello message output.");
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
index beee4f6a721e53..3127418f9653fd 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/clustering/KMeansClusterizationExample.java
@@ -73,8 +73,8 @@ public static void main(String[] args) throws IOException {
);
System.out.println(">>> KMeans centroids");
- Tracer.showAscii(mdl.getCenters()[0]);
- Tracer.showAscii(mdl.getCenters()[1]);
+ Tracer.showAscii(mdl.centers()[0]);
+ Tracer.showAscii(mdl.centers()[1]);
System.out.println(">>>");
System.out.println(">>> --------------------------------------------");
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/ANNClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/ANNClassificationExportImportExample.java
new file mode 100644
index 00000000000000..618e4c6cdaf463
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/ANNClassificationExportImportExample.java
@@ -0,0 +1,339 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.UUID;
+import javax.cache.Cache;
+import org.apache.commons.math3.util.Precision;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
+import org.apache.ignite.cache.query.QueryCursor;
+import org.apache.ignite.cache.query.ScanQuery;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
+import org.apache.ignite.ml.knn.NNClassificationModel;
+import org.apache.ignite.ml.knn.ann.ANNClassificationModel;
+import org.apache.ignite.ml.knn.ann.ANNClassificationTrainer;
+import org.apache.ignite.ml.math.distances.EuclideanDistance;
+import org.apache.ignite.ml.math.distances.ManhattanDistance;
+import org.apache.ignite.ml.math.primitives.vector.impl.DenseVector;
+
+/**
+ * Run compound naive Bayes classification model based on the
+ * compound naive Bayes classifier algorithm over a distributed cache.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points.
+ *
+ * After that it trains the compound naive Bayes classification model based on the specified data,
+ * exports the model to JSON and imports it back.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
+ * compares prediction to expected outcome (ground truth), and builds the
+ * confusion matrix.
+ *
+ * The accuracy of the exported model is compared with the accuracy of the model imported from JSON
+ * to make sure the export/import round trip preserves the model.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class CompoundNaiveBayesExportImportExample {
+ /** Run example. */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> Compound Naive Bayes classification model over partitioned dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println(">>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite)
+ .fillCacheWith(MLSandboxDatasets.MIXED_DATASET);
+
+ double[] priorProbabilities = new double[]{.5, .5};
+ double[][] thresholds = new double[][]{{.5}, {.5}, {.5}, {.5}, {.5}};
+
+ System.out.println("\n>>> Create new naive Bayes classification trainer object.");
+ CompoundNaiveBayesTrainer trainer = new CompoundNaiveBayesTrainer()
+ .withPriorProbabilities(priorProbabilities)
+ .withGaussianNaiveBayesTrainer(new GaussianNaiveBayesTrainer())
+ .withGaussianFeatureIdsToSkip(asList(3, 4, 5, 6, 7))
+ .withDiscreteNaiveBayesTrainer(new DiscreteNaiveBayesTrainer()
+ .setBucketThresholds(thresholds))
+ .withDiscreteFeatureIdsToSkip(asList(0, 1, 2));
+ System.out.println("\n>>> Perform the training to get the model.");
+
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+
+ CompoundNaiveBayesModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+
+ System.out.println("\n>>> Exported Compound Naive Bayes model: " + mdl.toString(true));
+
+ double accuracy = Evaluator.evaluate(
+ dataCache,
+ mdl,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for exported Compound Naive Bayes model:" + accuracy);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ CompoundNaiveBayesModel modelImportedFromJSON = CompoundNaiveBayesModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported Compound Naive Bayes model: " + modelImportedFromJSON.toString(true));
+
+ accuracy = Evaluator.evaluate(
+ dataCache,
+ modelImportedFromJSON,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for imported Compound Naive Bayes model:" + accuracy);
+
+ System.out.println("\n>>> Compound Naive Bayes model over partitioned dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeClassificationExportImportExample.java
new file mode 100644
index 00000000000000..e7ad7ca71e676e
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeClassificationExportImportExample.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Random;
+import org.apache.commons.math3.util.Precision;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
+
+/**
+ * Example of using distributed {@link DecisionTreeClassificationTrainer}.
+ *
+ * Code in this example launches Ignite grid and fills the cache with pseudo random training data points.
+ *
+ * After that it creates classification trainer and uses it to train the model on the training set.
+ *
+ * Finally, this example loops over the pseudo randomly generated test set of data points, applies the trained model,
+ * and compares prediction to expected outcome.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class DecisionTreeClassificationExportImportExample {
+ /**
+ * Executes example.
+ *
+ * @param args Command line arguments, none required.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println(">>> Decision tree classification trainer example started.");
+
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ // Create cache with training data.
+ CacheConfiguration> trainingSetCfg = new CacheConfiguration<>();
+ trainingSetCfg.setName("TRAINING_SET");
+ trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
+
+ IgniteCache> trainingSet = null;
+ Path jsonMdlPath = null;
+ try {
+ trainingSet = ignite.createCache(trainingSetCfg);
+
+ Random rnd = new Random(0);
+
+ // Fill training data.
+ for (int i = 0; i < 1000; i++)
+ trainingSet.put(i, generatePoint(rnd));
+
+ // Create classification trainer.
+ DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
+
+ // Train decision tree model.
+ LabeledDummyVectorizer vectorizer = new LabeledDummyVectorizer<>();
+ DecisionTreeModel mdl = trainer.fit(
+ ignite,
+ trainingSet,
+ vectorizer
+ );
+
+ System.out.println("\n>>> Exported Decision tree classification model: " + mdl);
+
+ int correctPredictions = evaluateModel(rnd, mdl);
+
+ System.out.println("\n>>> Accuracy for exported Decision tree classification model: " + correctPredictions / 10.0 + "%");
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ DecisionTreeModel modelImportedFromJSON = DecisionTreeModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported Decision tree classification model: " + modelImportedFromJSON);
+
+ correctPredictions = evaluateModel(rnd, modelImportedFromJSON);
+
+ System.out.println("\n>>> Accuracy for imported Decision tree classification model: " + correctPredictions / 10.0 + "%");
+
+ System.out.println("\n>>> Decision tree classification trainer example completed.");
+ }
+ finally {
+ if (trainingSet != null)
+ trainingSet.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+ private static int evaluateModel(Random rnd, DecisionTreeModel mdl) {
+ // Calculate score.
+ int correctPredictions = 0;
+ for (int i = 0; i < 1000; i++) {
+ LabeledVector pnt = generatePoint(rnd);
+
+ double prediction = mdl.predict(pnt.features());
+ double lbl = pnt.label();
+
+ if (i % 50 == 1)
+ System.out.printf(">>> test #: %d\t\t predicted: %.4f\t\tlabel: %.4f\n", i, prediction, lbl);
+
+ if (Precision.equals(prediction, lbl, Precision.EPSILON))
+ correctPredictions++;
+ }
+ return correctPredictions;
+ }
+
+ /**
+ * Generate point with {@code x} in (-0.5, 0.5) and {@code y} in the same interval. If {@code x * y > 0} then label
+ * is 1, otherwise 0.
+ *
+ * @param rnd Random.
+ * @return Point with label.
+ */
+ private static LabeledVector generatePoint(Random rnd) {
+
+ double x = rnd.nextDouble() - 0.5;
+ double y = rnd.nextDouble() - 0.5;
+
+ return new LabeledVector<>(VectorUtils.of(x, y), x * y > 0 ? 1. : 0.);
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeRegressionExportImportExample.java
new file mode 100644
index 00000000000000..9857ba9edab557
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DecisionTreeRegressionExportImportExample.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
+import org.apache.ignite.ml.tree.DecisionTreeRegressionTrainer;
+
+/**
+ * Example of using distributed {@link DecisionTreeRegressionTrainer}.
+ *
+ * Code in this example launches Ignite grid and fills the cache with generated test data points ({@code sin(x)} on
+ * interval {@code [0, 10)}).
+ *
+ * After that it creates classification trainer and uses it to train the model on the training set.
+ *
+ * Finally, this example loops over the test data points, applies the trained model, and compares prediction to expected
+ * outcome (ground truth).
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class DecisionTreeRegressionExportImportExample {
+ /**
+ * Executes example.
+ *
+ * @param args Command line arguments, none required.
+ */
+ public static void main(String... args) throws IOException {
+ System.out.println(">>> Decision tree regression trainer example started.");
+
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ // Create cache with training data.
+ CacheConfiguration> trainingSetCfg = new CacheConfiguration<>();
+ trainingSetCfg.setName("TRAINING_SET");
+ trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
+
+ IgniteCache> trainingSet = null;
+ Path jsonMdlPath = null;
+ try {
+ trainingSet = ignite.createCache(trainingSetCfg);
+
+ // Fill training data.
+ generatePoints(trainingSet);
+
+ // Create regression trainer.
+ DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(10, 0);
+
+ // Train decision tree model.
+ DecisionTreeModel mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
+
+ System.out.println("\n>>> Exported Decision tree regression model: " + mdl);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ DecisionTreeModel modelImportedFromJSON = DecisionTreeModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported Decision tree regression model: " + modelImportedFromJSON);
+
+ System.out.println(">>> ---------------------------------");
+ System.out.println(">>> | Prediction\t| Ground Truth\t|");
+ System.out.println(">>> ---------------------------------");
+
+ // Calculate score.
+ for (int x = 0; x < 10; x++) {
+ double predicted = mdl.predict(VectorUtils.of(x));
+
+ System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", predicted, Math.sin(x));
+ }
+
+ System.out.println(">>> ---------------------------------");
+
+ System.out.println("\n>>> Decision tree regression trainer example completed.");
+ }
+ finally {
+ if (trainingSet != null)
+ trainingSet.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+ /**
+ * Generates {@code sin(x)} on interval {@code [0, 10)} and loads into the specified cache.
+ */
+ private static void generatePoints(IgniteCache> trainingSet) {
+ for (int i = 0; i < 1000; i++) {
+ double x = i / 100.0;
+ double y = Math.sin(x);
+
+ trainingSet.put(i, new LabeledVector<>(VectorUtils.of(x), y));
+ }
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DiscreteNaiveBayesExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DiscreteNaiveBayesExportImportExample.java
new file mode 100644
index 00000000000000..c4d44c45684792
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/DiscreteNaiveBayesExportImportExample.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesModel;
+import org.apache.ignite.ml.naivebayes.discrete.DiscreteNaiveBayesTrainer;
+import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
+import org.apache.ignite.ml.selection.scoring.metric.MetricName;
+
+/**
+ * Run naive Bayes classification model based on
+ * naive Bayes classifier algorithm ({@link DiscreteNaiveBayesTrainer}) over distributed cache.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points.
+ *
+ *
+ * After that it trains the Discrete naive Bayes classification model based on the specified data.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
+ * compares prediction to expected outcome (ground truth), and builds the
+ * confusion matrix.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class DiscreteNaiveBayesExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println(">>> Discrete naive Bayes classification model over partitioned dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println(">>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.ENGLISH_VS_SCOTTISH);
+
+ double[][] thresholds = new double[][] {{.5}, {.5}, {.5}, {.5}, {.5}};
+ System.out.println(">>> Create new Discrete naive Bayes classification trainer object.");
+ DiscreteNaiveBayesTrainer trainer = new DiscreteNaiveBayesTrainer()
+ .setBucketThresholds(thresholds);
+
+ System.out.println("\n>>> Perform the training to get the model.");
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+
+ DiscreteNaiveBayesModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+ System.out.println("\n>>> Exported Discrete Naive Bayes model: " + mdl.toString(true));
+
+ double accuracy = Evaluator.evaluate(
+ dataCache,
+ mdl,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for exported Discrete Naive Bayes model:" + accuracy);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ DiscreteNaiveBayesModel modelImportedFromJSON = DiscreteNaiveBayesModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported Discrete Naive Bayes model: " + modelImportedFromJSON.toString(true));
+
+ accuracy = Evaluator.evaluate(
+ dataCache,
+ modelImportedFromJSON,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for imported Discrete Naive Bayes model:" + accuracy);
+
+ System.out.println("\n>>> Discrete Naive bayes model over partitioned dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesClassificationExportImportExample.java
new file mode 100644
index 00000000000000..9aa8f228f717aa
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesClassificationExportImportExample.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.composition.boosting.GDBModel;
+import org.apache.ignite.ml.composition.boosting.GDBTrainer;
+import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * Example represents a solution for the task of classification learning based on Gradient Boosting on trees
+ * implementation. It shows an initialization of {@link GDBBinaryClassifierOnTreesTrainer}, initialization of Ignite
+ * Cache, learning step and comparing of predicted and real values.
+ *
+ * In this example dataset is created automatically by meander function {@code f(x) = [sin(x) > 0]}.
+ */
+public class GDBOnTreesClassificationExportImportExample {
+ /**
+ * Run example.
+ *
+ * @param args Command line arguments, none required.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> GDB classification trainer example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ // Create cache with training data.
+ CacheConfiguration trainingSetCfg = createCacheConfiguration();
+ IgniteCache trainingSet = null;
+ Path jsonMdlPath = null;
+ try {
+ trainingSet = fillTrainingData(ignite, trainingSetCfg);
+
+ // Create classification trainer.
+ GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(1.0, 300, 2, 0.)
+ .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.1));
+
+ // Train decision tree model.
+ GDBModel mdl = trainer.fit(
+ ignite,
+ trainingSet,
+ new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)
+ );
+
+ System.out.println("\n>>> Exported GDB classification model: " + mdl.toString(true));
+
+ predictOnGeneratedData(mdl);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ IgniteFunction lbMapper = lb -> lb > 0.5 ? 1.0 : 0.0;
+ GDBModel modelImportedFromJSON = GDBModel.fromJSON(jsonMdlPath).withLblMapping(lbMapper);
+
+ System.out.println("\n>>> Imported GDB classification model: " + modelImportedFromJSON.toString(true));
+
+ predictOnGeneratedData(modelImportedFromJSON);
+
+ System.out.println(">>> GDB classification trainer example completed.");
+ }
+ finally {
+ if (trainingSet != null)
+ trainingSet.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+ private static void predictOnGeneratedData(GDBModel mdl) {
+ System.out.println(">>> ---------------------------------");
+ System.out.println(">>> | Prediction\t| Valid answer\t|");
+ System.out.println(">>> ---------------------------------");
+
+ // Calculate score.
+ for (int x = -5; x < 5; x++) {
+ double predicted = mdl.predict(VectorUtils.of(x));
+
+ System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", predicted, Math.sin(x) < 0 ? 0.0 : 1.0);
+ }
+
+ System.out.println(">>> ---------------------------------");
+ System.out.println(">>> Count of trees = " + mdl.getModels().size());
+ System.out.println(">>> ---------------------------------");
+ }
+
+ /**
+ * Create cache configuration.
+ */
+ @NotNull private static CacheConfiguration createCacheConfiguration() {
+ CacheConfiguration trainingSetCfg = new CacheConfiguration<>();
+ trainingSetCfg.setName("TRAINING_SET");
+ trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
+ return trainingSetCfg;
+ }
+
+ /**
+ * Fill meander-like training data.
+ *
+ * @param ignite Ignite instance.
+ * @param trainingSetCfg Training set config.
+ */
+ @NotNull private static IgniteCache fillTrainingData(Ignite ignite,
+ CacheConfiguration trainingSetCfg) {
+ IgniteCache trainingSet = ignite.getOrCreateCache(trainingSetCfg);
+ for (int i = -50; i <= 50; i++) {
+ double x = ((double)i) / 10.0;
+ double y = Math.sin(x) < 0 ? 0.0 : 1.0;
+ trainingSet.put(i, new double[] {x, y});
+ }
+ return trainingSet;
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesRegressionExportImportExample.java
new file mode 100644
index 00000000000000..14233e316e4a3d
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GDBOnTreesRegressionExportImportExample.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.composition.boosting.GDBModel;
+import org.apache.ignite.ml.composition.boosting.GDBTrainer;
+import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
+import org.apache.ignite.ml.tree.boosting.GDBRegressionOnTreesTrainer;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * Example represents a solution for the task of regression learning based on Gradient Boosting on trees implementation.
+ * It shows an initialization of {@link GDBRegressionOnTreesTrainer}, initialization of Ignite Cache, learning step and
+ * comparing of predicted and real values.
+ *
+ * In this example dataset is created automatically by parabolic function {@code f(x) = x^2}.
+ */
+public class GDBOnTreesRegressionExportImportExample {
+ /**
+ * Run example.
+ *
+ * @param args Command line arguments, none required.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> GDB regression trainer example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println(">>> Ignite grid started.");
+
+ // Create cache with training data.
+ CacheConfiguration trainingSetCfg = createCacheConfiguration();
+ IgniteCache trainingSet = null;
+ Path jsonMdlPath = null;
+ try {
+ trainingSet = fillTrainingData(ignite, trainingSetCfg);
+
+ // Create regression trainer.
+ GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
+ .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.001));
+
+ // Train decision tree model.
+ GDBModel mdl = trainer.fit(
+ ignite,
+ trainingSet,
+ new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)
+ );
+
+ System.out.println("\n>>> Exported GDB regression model: " + mdl.toString(true));
+
+ predictOnGeneratedData(mdl);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ IgniteFunction lbMapper = lb -> lb;
+ GDBModel modelImportedFromJSON = GDBModel.fromJSON(jsonMdlPath).withLblMapping(lbMapper);
+
+ System.out.println("\n>>> Imported GDB regression model: " + modelImportedFromJSON.toString(true));
+
+ predictOnGeneratedData(modelImportedFromJSON);
+
+ System.out.println(">>> GDB regression trainer example completed.");
+ }
+ finally {
+ if (trainingSet != null)
+ trainingSet.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+ private static void predictOnGeneratedData(GDBModel mdl) {
+ System.out.println(">>> ---------------------------------");
+ System.out.println(">>> | Prediction\t| Valid answer \t|");
+ System.out.println(">>> ---------------------------------");
+
+ // Calculate score.
+ for (int x = -5; x < 5; x++) {
+ double predicted = mdl.predict(VectorUtils.of(x));
+
+ System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", predicted, Math.pow(x, 2));
+ }
+
+ System.out.println(">>> ---------------------------------");
+ }
+
+ /**
+ * Create cache configuration.
+ */
+ @NotNull private static CacheConfiguration createCacheConfiguration() {
+ CacheConfiguration trainingSetCfg = new CacheConfiguration<>();
+ trainingSetCfg.setName("TRAINING_SET");
+ trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
+ return trainingSetCfg;
+ }
+
+ /**
+ * Fill parabolic training data.
+ *
+ * @param ignite Ignite instance.
+ * @param trainingSetCfg Training set config.
+ */
+ @NotNull private static IgniteCache fillTrainingData(Ignite ignite,
+ CacheConfiguration trainingSetCfg) {
+ IgniteCache trainingSet = ignite.getOrCreateCache(trainingSetCfg);
+ for (int i = -50; i <= 50; i++) {
+ double x = ((double)i) / 10.0;
+ double y = Math.pow(x, 2);
+ trainingSet.put(i, new double[] {x, y});
+ }
+ return trainingSet;
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GaussianNaiveBayesExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GaussianNaiveBayesExportImportExample.java
new file mode 100644
index 00000000000000..b6fb9c9fd20979
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/GaussianNaiveBayesExportImportExample.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesModel;
+import org.apache.ignite.ml.naivebayes.gaussian.GaussianNaiveBayesTrainer;
+import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
+import org.apache.ignite.ml.selection.scoring.metric.MetricName;
+
+/**
+ * Run naive Bayes classification model based on naive
+ * Bayes classifier algorithm ({@link GaussianNaiveBayesTrainer}) over distributed cache.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points (based on the
+ * Iris dataset).
+ *
+ * After that it trains the naive Bayes classification model based on the specified data.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
+ * compares prediction to expected outcome (ground truth), and builds
+ * confusion matrix.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class GaussianNaiveBayesExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> Naive Bayes classification model over partitioned dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println(">>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
+
+ System.out.println(">>> Create new Gaussian Naive Bayes classification trainer object.");
+ GaussianNaiveBayesTrainer trainer = new GaussianNaiveBayesTrainer();
+
+ System.out.println("\n>>> Perform the training to get the model.");
+
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+
+ GaussianNaiveBayesModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+ System.out.println("\n>>> Exported Gaussian Naive Bayes model: " + mdl.toString(true));
+
+ double accuracy = Evaluator.evaluate(
+ dataCache,
+ mdl,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for exported Gaussian Naive Bayes model:" + accuracy);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ GaussianNaiveBayesModel modelImportedFromJSON = GaussianNaiveBayesModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported Gaussian Naive Bayes model: " + modelImportedFromJSON.toString(true));
+
+ accuracy = Evaluator.evaluate(
+ dataCache,
+ modelImportedFromJSON,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for imported Gaussian Naive Bayes model:" + accuracy);
+
+ System.out.println("\n>>> Gaussian Naive bayes model over partitioned dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/KMeansClusterizationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/KMeansClusterizationExportImportExample.java
new file mode 100644
index 00000000000000..ec5e6899f7eab9
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/KMeansClusterizationExportImportExample.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.clustering.kmeans.KMeansModel;
+import org.apache.ignite.ml.clustering.kmeans.KMeansTrainer;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.distances.WeightedMinkowskiDistance;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+
+/**
+ * Run KMeans clustering algorithm ({@link KMeansTrainer}) over distributed dataset.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points (based on the
+ * Iris dataset).
+ *
+ * After that it trains the model based on the specified data using
+ * KMeans algorithm.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict what cluster does
+ * this point belong to, and compares prediction to expected outcome (ground truth).
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class KMeansClusterizationExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> KMeans clustering algorithm over cached dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println(">>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
+
+ Vectorizer vectorizer = new DummyVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST);
+
+ KMeansTrainer trainer = new KMeansTrainer()
+ .withDistance(new WeightedMinkowskiDistance(2, new double[] {5.9360, 2.7700, 4.2600, 1.3260}));
+ //.withDistance(new MinkowskiDistance(2));
+
+ KMeansModel mdl = trainer.fit(
+ ignite,
+ dataCache,
+ vectorizer
+ );
+
+ System.out.println("\n>>> Exported KMeans model: " + mdl);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ KMeansModel modelImportedFromJSON = KMeansModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported KMeans model: " + modelImportedFromJSON);
+
+ System.out.println("\n>>> KMeans clustering algorithm over cached dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LinearRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LinearRegressionExportImportExample.java
new file mode 100644
index 00000000000000..723784bb999c77
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LinearRegressionExportImportExample.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionLSQRTrainer;
+import org.apache.ignite.ml.regressions.linear.LinearRegressionModel;
+import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
+import org.apache.ignite.ml.selection.scoring.metric.MetricName;
+
+/**
+ * Run linear regression model based on LSQR algorithm
+ * ({@link LinearRegressionLSQRTrainer}) over cached dataset.
+ *
+ * Code in this example launches Ignite grid and fills the cache with simple test data.
+ *
+ * After that it trains the linear regression model based on the specified data.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict the target value
+ * and compares prediction to expected outcome (ground truth).
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class LinearRegressionExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> Linear regression model over cache based dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println(">>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.MORTALITY_DATA);
+
+ System.out.println("\n>>> Create new linear regression trainer object.");
+ LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();
+
+ System.out.println("\n>>> Perform the training to get the model.");
+
+ LinearRegressionModel mdl = trainer.fit(
+ ignite,
+ dataCache,
+ new DummyVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST)
+ );
+
+ System.out.println("\n>>> Exported LinearRegression model: " + mdl);
+
+ double rmse = Evaluator.evaluate(
+ dataCache,
+ mdl,
+ new DummyVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST),
+ MetricName.RMSE
+ );
+
+ System.out.println("\n>>> RMSE for exported LinearRegression model: " + rmse);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ LinearRegressionModel modelImportedFromJSON = LinearRegressionModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported LinearRegression model: " + modelImportedFromJSON);
+
+ rmse = Evaluator.evaluate(
+ dataCache,
+ mdl,
+ new DummyVectorizer().labeled(Vectorizer.LabelCoordinate.FIRST),
+ MetricName.RMSE
+ );
+
+ System.out.println("\n>>> RMSE for imported LinearRegression model: " + rmse);
+
+ System.out.println("\n>>> Linear regression model over cache based dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LogisticRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LogisticRegressionExportImportExample.java
new file mode 100644
index 00000000000000..6491f7edd5a3fd
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/LogisticRegressionExportImportExample.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.nn.UpdatesStrategy;
+import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate;
+import org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator;
+import org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel;
+import org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer;
+import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
+import org.apache.ignite.ml.selection.scoring.metric.MetricName;
+
+/**
+ * Run logistic regression model based on
+ * stochastic gradient descent algorithm ({@link LogisticRegressionSGDTrainer}) over distributed cache.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points (based on the
+ * Iris dataset).
+ *
+ * After that it trains the logistic regression model based on the specified data.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict the target value,
+ * compares prediction to expected outcome (ground truth), and builds
+ * confusion matrix.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class LogisticRegressionExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> Logistic regression model over partitioned dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
+
+ System.out.println("\n>>> Create new logistic regression trainer object.");
+ LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer()
+ .withUpdatesStgy(new UpdatesStrategy<>(
+ new SimpleGDUpdateCalculator(0.2),
+ SimpleGDParameterUpdate.SUM_LOCAL,
+ SimpleGDParameterUpdate.AVG
+ ))
+ .withMaxIterations(100000)
+ .withLocIterations(100)
+ .withBatchSize(10)
+ .withSeed(123L);
+
+ System.out.println("\n>>> Perform the training to get the model.");
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+
+ LogisticRegressionModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+
+ System.out.println("\n>>> Exported logistic regression model: " + mdl);
+
+ double accuracy = Evaluator.evaluate(dataCache,
+ mdl, vectorizer, MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for exported logistic regression model " + accuracy);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ LogisticRegressionModel modelImportedFromJSON = LogisticRegressionModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported logistic regression model: " + modelImportedFromJSON);
+
+ accuracy = Evaluator.evaluate(dataCache,
+ modelImportedFromJSON, vectorizer, MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for imported logistic regression model " + accuracy);
+
+ System.out.println("\n>>> Logistic regression model over partitioned dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestClassificationExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestClassificationExportImportExample.java
new file mode 100644
index 00000000000000..6bb368f56f6b28
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestClassificationExportImportExample.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import javax.cache.Cache;
+import org.apache.commons.math3.util.Precision;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.query.QueryCursor;
+import org.apache.ignite.cache.query.ScanQuery;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.FeatureMeta;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.tree.randomforest.RandomForestClassifierTrainer;
+import org.apache.ignite.ml.tree.randomforest.RandomForestModel;
+import org.apache.ignite.ml.tree.randomforest.data.FeaturesCountSelectionStrategies;
+
+/**
+ * Example represents a solution for the task of wine classification based on a
+ * Random Forest implementation for
+ * multi-classification.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points (based on the
+ * Wine recognition dataset).
+ *
+ * After that it initializes the {@link RandomForestClassifierTrainer} with thread pool for multi-thread learning and
+ * trains the model based on the specified data using random forest regression algorithm.
+ *
+ * Finally, this example loops over the test set of data points, compares prediction of the trained model to the
+ * expected outcome (ground truth), and evaluates accuracy of the model.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class RandomForestClassificationExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> Random Forest multi-class classification algorithm over cached dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.WINE_RECOGNITION);
+
+ AtomicInteger idx = new AtomicInteger(0);
+ RandomForestClassifierTrainer classifier = new RandomForestClassifierTrainer(
+ IntStream.range(0, dataCache.get(1).size() - 1).mapToObj(
+ x -> new FeatureMeta("", idx.getAndIncrement(), false)).collect(Collectors.toList())
+ ).withAmountOfTrees(101)
+ .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
+ .withMaxDepth(4)
+ .withMinImpurityDelta(0.)
+ .withSubSampleSize(0.3)
+ .withSeed(0);
+
+ System.out.println(">>> Configured trainer: " + classifier.getClass().getSimpleName());
+
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+ RandomForestModel mdl = classifier.fit(ignite, dataCache, vectorizer);
+
+ System.out.println(">>> Exported Random Forest classification model: " + mdl.toString(true));
+
+ double accuracy = evaluateModel(dataCache, mdl);
+
+ System.out.println("\n>>> Accuracy for exported Random Forest classification model " + accuracy);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ RandomForestModel modelImportedFromJSON = RandomForestModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported Random Forest classification model: " + modelImportedFromJSON);
+
+ accuracy = evaluateModel(dataCache, mdl);
+
+ System.out.println("\n>>> Accuracy for imported Random Forest classification model " + accuracy);
+
+ System.out.println("\n>>> Random Forest multi-class classification algorithm over cached dataset usage example completed.");
+
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+ private static double evaluateModel(IgniteCache dataCache, RandomForestModel randomForestMdl) {
+ int amountOfErrors = 0;
+ int totalAmount = 0;
+
+ try (QueryCursor> observations = dataCache.query(new ScanQuery<>())) {
+ for (Cache.Entry observation : observations) {
+ Vector val = observation.getValue();
+ Vector inputs = val.copyOfRange(1, val.size());
+ double groundTruth = val.get(0);
+
+ double prediction = randomForestMdl.predict(inputs);
+
+ totalAmount++;
+ if (!Precision.equals(groundTruth, prediction, Precision.EPSILON))
+ amountOfErrors++;
+ }
+ }
+
+ return 1 - amountOfErrors / (double) totalAmount;
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestRegressionExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestRegressionExportImportExample.java
new file mode 100644
index 00000000000000..4d7d4ad738fb8b
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/RandomForestRegressionExportImportExample.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import javax.cache.Cache;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.query.QueryCursor;
+import org.apache.ignite.cache.query.ScanQuery;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.FeatureMeta;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
+import org.apache.ignite.ml.environment.logging.ConsoleLogger;
+import org.apache.ignite.ml.environment.parallelism.ParallelismStrategy;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.tree.randomforest.RandomForestModel;
+import org.apache.ignite.ml.tree.randomforest.RandomForestRegressionTrainer;
+import org.apache.ignite.ml.tree.randomforest.data.FeaturesCountSelectionStrategies;
+
+/**
+ * Example represents a solution for the task of price predictions for houses in Boston based on a
+ * Random Forest implementation for regression.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points (based on the
+ * Boston Housing dataset).
+ *
+ * After that it initializes the {@link RandomForestRegressionTrainer} and trains the model based on the specified data
+ * using random forest regression algorithm.
+ *
+ * Finally, this example loops over the test set of data points, compares prediction of the trained model to the
+ * expected outcome (ground truth), and evaluates model quality in terms of Mean Squared Error (MSE) and Mean Absolute
+ * Error (MAE).
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class RandomForestRegressionExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> Random Forest regression algorithm over cached dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.BOSTON_HOUSE_PRICES);
+
+ AtomicInteger idx = new AtomicInteger(0);
+ RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(
+ IntStream.range(0, dataCache.get(1).size() - 1).mapToObj(
+ x -> new FeatureMeta("", idx.getAndIncrement(), false)).collect(Collectors.toList())
+ ).withAmountOfTrees(101)
+ .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
+ .withMaxDepth(4)
+ .withMinImpurityDelta(0.)
+ .withSubSampleSize(0.3)
+ .withSeed(0);
+
+ trainer.withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
+ .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
+ .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW)
+ );
+
+ System.out.println("\n>>> Configured trainer: " + trainer.getClass().getSimpleName());
+
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+ RandomForestModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+
+ System.out.println("\n>>> Exported Random Forest regression model: " + mdl.toString(true));
+
+ double mae = evaluateModel(dataCache, mdl);
+
+ System.out.println("\n>>> Mean absolute error (MAE) for exported Random Forest regression model " + mae);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ RandomForestModel modelImportedFromJSON = RandomForestModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Exported Random Forest regression model: " + modelImportedFromJSON.toString(true));
+
+ mae = evaluateModel(dataCache, modelImportedFromJSON);
+
+ System.out.println("\n>>> Mean absolute error (MAE) for exported Random Forest regression model " + mae);
+
+ System.out.println("\n>>> Random Forest regression algorithm over cached dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+
+ private static double evaluateModel(IgniteCache dataCache, RandomForestModel randomForestMdl) {
+ double mae = 0.0;
+ int totalAmount = 0;
+
+ try (QueryCursor> observations = dataCache.query(new ScanQuery<>())) {
+ for (Cache.Entry observation : observations) {
+ Vector val = observation.getValue();
+ Vector inputs = val.copyOfRange(1, val.size());
+ double groundTruth = val.get(0);
+
+ double prediction = randomForestMdl.predict(inputs);
+
+ mae += Math.abs(prediction - groundTruth);
+
+ totalAmount++;
+ }
+
+ mae /= totalAmount;
+ }
+ return mae;
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/SVMExportImportExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/SVMExportImportExample.java
new file mode 100644
index 00000000000000..24262901b88950
--- /dev/null
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/exchange/SVMExportImportExample.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.inference.exchange;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ml.util.MLSandboxDatasets;
+import org.apache.ignite.examples.ml.util.SandboxMLCache;
+import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
+import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
+import org.apache.ignite.ml.math.primitives.vector.Vector;
+import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
+import org.apache.ignite.ml.selection.scoring.metric.MetricName;
+import org.apache.ignite.ml.svm.SVMLinearClassificationModel;
+import org.apache.ignite.ml.svm.SVMLinearClassificationTrainer;
+
+/**
+ * Run SVM binary-class classification model ({@link SVMLinearClassificationModel}) over distributed dataset.
+ *
+ * Code in this example launches Ignite grid and fills the cache with test data points (based on the
+ * Iris dataset).
+ *
+ * After that it trains the model based on the specified data using the SVM linear classification algorithm.
+ *
+ * Finally, this example loops over the test set of data points, applies the trained model to predict the class label
+ * of each point, compares the prediction to the expected outcome (ground truth), and computes the
+ * accuracy metric.
+ *
+ * You can change the test data used in this example and re-run it to explore this algorithm further.
+ */
+public class SVMExportImportExample {
+ /**
+ * Run example.
+ */
+ public static void main(String[] args) throws IOException {
+ System.out.println();
+ System.out.println(">>> SVM Binary classification model over cached dataset usage example started.");
+ // Start ignite grid.
+ try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+ System.out.println("\n>>> Ignite grid started.");
+
+ IgniteCache dataCache = null;
+ Path jsonMdlPath = null;
+ try {
+ dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
+
+ SVMLinearClassificationTrainer trainer = new SVMLinearClassificationTrainer();
+
+ Vectorizer vectorizer = new DummyVectorizer()
+ .labeled(Vectorizer.LabelCoordinate.FIRST);
+
+ SVMLinearClassificationModel mdl = trainer.fit(ignite, dataCache, vectorizer);
+
+ System.out.println("\n>>> Exported SVM model: " + mdl);
+
+ double accuracy = Evaluator.evaluate(
+ dataCache,
+ mdl,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for exported SVM model: " + accuracy);
+
+ jsonMdlPath = Files.createTempFile(null, null);
+ mdl.toJSON(jsonMdlPath);
+
+ SVMLinearClassificationModel modelImportedFromJSON = SVMLinearClassificationModel.fromJSON(jsonMdlPath);
+
+ System.out.println("\n>>> Imported SVM model: " + modelImportedFromJSON);
+
+ accuracy = Evaluator.evaluate(
+ dataCache,
+ modelImportedFromJSON,
+ vectorizer,
+ MetricName.ACCURACY
+ );
+
+ System.out.println("\n>>> Accuracy for imported SVM model: " + accuracy);
+
+ System.out.println("\n>>> SVM Binary classification model over cache based dataset usage example completed.");
+ }
+ finally {
+ if (dataCache != null)
+ dataCache.destroy();
+ if (jsonMdlPath != null)
+ Files.deleteIfExists(jsonMdlPath);
+ }
+ }
+ finally {
+ System.out.flush();
+ }
+ }
+}
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java
index 3340ed9d33e0a8..d03bb966f6a7d5 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeFromSparkExample.java
@@ -34,7 +34,7 @@
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.sparkmodelparser.SparkModelParser;
import org.apache.ignite.ml.sparkmodelparser.SupportedSparkModels;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Run Decision Tree model loaded from snappy.parquet file. The snappy.parquet file was generated by Spark MLLib
@@ -69,7 +69,7 @@ public static void main(String[] args) throws FileNotFoundException {
final Vectorizer vectorizer = new DummyVectorizer(0, 5, 6, 4).labeled(1);
- DecisionTreeNode mdl = (DecisionTreeNode)SparkModelParser.parse(
+ DecisionTreeModel mdl = (DecisionTreeModel)SparkModelParser.parse(
SPARK_MDL_PATH,
SupportedSparkModels.DECISION_TREE,
env
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java
index 9c36198b2cf389..5fd446140f38af 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/inference/spark/modelparser/DecisionTreeRegressionFromSparkExample.java
@@ -35,7 +35,7 @@
import org.apache.ignite.ml.sparkmodelparser.SparkModelParser;
import org.apache.ignite.ml.sparkmodelparser.SupportedSparkModels;
import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Run Decision tree regression model loaded from snappy.parquet file. The snappy.parquet file was generated by Spark
@@ -69,7 +69,7 @@ public static void main(String[] args) throws FileNotFoundException {
final Vectorizer vectorizer = new DummyVectorizer(0, 1, 5, 6).labeled(4);
- DecisionTreeNode mdl = (DecisionTreeNode)SparkModelParser.parse(
+ DecisionTreeModel mdl = (DecisionTreeModel)SparkModelParser.parse(
SPARK_MDL_PATH,
SupportedSparkModels.DECISION_TREE_REGRESSION,
env
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java
index c24091c253b392..233cb13b9135b6 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExample.java
@@ -31,7 +31,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Example that shows how to use String Encoder preprocessor to encode features presented as a strings.
@@ -73,7 +73,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
encoderPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java
index d9482a5123477a..7270b03e4016f1 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/EncoderExampleWithNormalization.java
@@ -32,7 +32,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Example that shows how to combine together two preprocessors: String Encoder preprocessor to encode features presented as a strings
@@ -80,7 +80,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
normalizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java
index d97c49c78411a4..3547d7e20106c4 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/preprocessing/encoding/LabelEncoderExample.java
@@ -31,7 +31,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Example that shows how to use Label Encoder preprocessor to encode labels presented as a strings.
@@ -79,7 +79,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
lbEncoderPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java
index 511eb0501c1817..c572d81038741a 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/regression/linear/BostonHousePricesPredictionExample.java
@@ -105,7 +105,7 @@ public static void main(String[] args) throws IOException {
private static String toString(LinearRegressionModel mdl) {
BiFunction formatter = (idx, val) -> String.format("%.2f*f%d", val, idx);
- Vector weights = mdl.getWeights();
+ Vector weights = mdl.weights();
StringBuilder sb = new StringBuilder(formatter.apply(0, weights.get(0)));
for (int fid = 1; fid < weights.size(); fid++) {
@@ -114,7 +114,7 @@ private static String toString(LinearRegressionModel mdl) {
.append(formatter.apply(fid, Math.abs(w)));
}
- double intercept = mdl.getIntercept();
+ double intercept = mdl.intercept();
sb.append(" ").append(intercept > 0 ? "+" : "-").append(" ")
.append(String.format("%.2f", Math.abs(intercept)));
return sb.toString();
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java
index e6a4461ca64e1d..93dc0513ebf801 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/selection/cv/CrossValidationExample.java
@@ -30,7 +30,7 @@
import org.apache.ignite.ml.selection.scoring.metric.MetricName;
import org.apache.ignite.ml.structures.LabeledVector;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Run decision tree classification with
@@ -75,7 +75,7 @@ public static void main(String... args) {
LabeledDummyVectorizer vectorizer = new LabeledDummyVectorizer<>();
- CrossValidation> scoreCalculator
+ CrossValidation> scoreCalculator
= new CrossValidation<>();
double[] accuracyScores = scoreCalculator
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java
index 543e211f06b3f1..68058b75b9eb32 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLInferenceExample.java
@@ -30,7 +30,7 @@
import org.apache.ignite.ml.sql.SQLFunctions;
import org.apache.ignite.ml.sql.SqlDatasetBuilder;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
import static org.apache.ignite.examples.ml.sql.DecisionTreeClassificationTrainerSQLTableExample.loadTitanicDatasets;
@@ -101,7 +101,7 @@ public static void main(String[] args) throws IOException {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
System.out.println(">>> Perform training...");
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
new SqlDatasetBuilder(ignite, "SQL_PUBLIC_TITANIC_TRAIN"),
new BinaryObjectVectorizer<>("pclass", "age", "sibsp", "parch", "fare")
.withFeature("sex", BinaryObjectVectorizer.Mapping.create().map("male", 1.0).defaultValue(0.0))
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java
index 083608ee23e674..d05d1a91d814f7 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/sql/DecisionTreeClassificationTrainerSQLTableExample.java
@@ -34,7 +34,7 @@
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
import org.apache.ignite.ml.sql.SqlDatasetBuilder;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Example of using distributed {@link DecisionTreeClassificationTrainer} on a data stored in SQL table.
@@ -101,7 +101,7 @@ public static void main(String[] args) throws IgniteCheckedException, IOExceptio
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
System.out.println(">>> Perform training...");
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
new SqlDatasetBuilder(ignite, "SQL_PUBLIC_TITANIC_TRAIN"),
new BinaryObjectVectorizer<>("pclass", "age", "sibsp", "parch", "fare")
.withFeature("sex", BinaryObjectVectorizer.Mapping.create().map("male", 1.0).defaultValue(0.0))
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java
index 600f4a595e0a4e..b1cf23e2de2dd2 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeClassificationTrainerExample.java
@@ -28,7 +28,7 @@
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
import org.apache.ignite.ml.structures.LabeledVector;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Example of using distributed {@link DecisionTreeClassificationTrainer}.
@@ -75,7 +75,7 @@ public static void main(String... args) {
// Train decision tree model.
LabeledDummyVectorizer vectorizer = new LabeledDummyVectorizer<>();
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
trainingSet,
vectorizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java
index 1a1977124879b8..5cfb828c8bd870 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/DecisionTreeRegressionTrainerExample.java
@@ -25,7 +25,7 @@
import org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer;
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
import org.apache.ignite.ml.structures.LabeledVector;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
import org.apache.ignite.ml.tree.DecisionTreeRegressionTrainer;
/**
@@ -70,7 +70,7 @@ public static void main(String... args) {
DecisionTreeRegressionTrainer trainer = new DecisionTreeRegressionTrainer(10, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
+ DecisionTreeModel mdl = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
System.out.println(">>> Decision tree regression model: " + mdl);
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java
index a2eaf47d46636d..7e6c5d3f723d67 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesClassificationTrainerExample.java
@@ -22,12 +22,12 @@
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.composition.ModelsComposition;
+import org.apache.ignite.ml.composition.boosting.GDBModel;
+import org.apache.ignite.ml.composition.boosting.GDBTrainer;
import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.trainers.DatasetTrainer;
import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
import org.jetbrains.annotations.NotNull;
@@ -58,11 +58,11 @@ public static void main(String... args) {
trainingSet = fillTrainingData(ignite, trainingSetCfg);
// Create classification trainer.
- DatasetTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(1.0, 300, 2, 0.)
+ GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(1.0, 300, 2, 0.)
.withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.1));
// Train decision tree model.
- ModelsComposition mdl = trainer.fit(
+ GDBModel mdl = trainer.fit(
ignite,
trainingSet,
new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java
index 09dd708b0bb891..a6ea135aa8b5db 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tree/boosting/GDBOnTreesRegressionTrainerExample.java
@@ -22,14 +22,12 @@
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.ml.composition.ModelsComposition;
+import org.apache.ignite.ml.composition.boosting.GDBModel;
+import org.apache.ignite.ml.composition.boosting.GDBTrainer;
import org.apache.ignite.ml.composition.boosting.convergence.mean.MeanAbsValueConvergenceCheckerFactory;
import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
import org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer;
-import org.apache.ignite.ml.inference.Model;
-import org.apache.ignite.ml.math.primitives.vector.Vector;
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
-import org.apache.ignite.ml.trainers.DatasetTrainer;
import org.apache.ignite.ml.tree.boosting.GDBRegressionOnTreesTrainer;
import org.jetbrains.annotations.NotNull;
@@ -60,11 +58,11 @@ public static void main(String... args) {
trainingSet = fillTrainingData(ignite, trainingSetCfg);
// Create regression trainer.
- DatasetTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
+ GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
.withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.001));
// Train decision tree model.
- Model mdl = trainer.fit(
+ GDBModel mdl = trainer.fit(
ignite,
trainingSet,
new DoubleArrayVectorizer().labeled(Vectorizer.LabelCoordinate.LAST)
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java
index b9006f536505f8..b8e1d00f690dfa 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_11_Boosting.java
@@ -21,7 +21,8 @@
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
-import org.apache.ignite.ml.composition.ModelsComposition;
+import org.apache.ignite.ml.composition.boosting.GDBModel;
+import org.apache.ignite.ml.composition.boosting.GDBTrainer;
import org.apache.ignite.ml.composition.boosting.convergence.median.MedianOfMedianConvergenceCheckerFactory;
import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
@@ -36,7 +37,6 @@
import org.apache.ignite.ml.selection.scoring.metric.MetricName;
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
-import org.apache.ignite.ml.trainers.DatasetTrainer;
import org.apache.ignite.ml.tree.boosting.GDBBinaryClassifierOnTreesTrainer;
/**
@@ -102,11 +102,11 @@ public static void main(String[] args) {
);
// Create classification trainer.
- DatasetTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(0.5, 500, 4, 0.)
+ GDBTrainer trainer = new GDBBinaryClassifierOnTreesTrainer(0.5, 500, 4, 0.)
.withCheckConvergenceStgyFactory(new MedianOfMedianConvergenceCheckerFactory(0.1));
// Train decision tree model.
- ModelsComposition mdl = trainer.fit(
+ GDBModel mdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java
index b6df5d68a637a9..97ccb5835a721a 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_1_Read_and_Learn.java
@@ -27,7 +27,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Usage of {@link DecisionTreeClassificationTrainer} to predict death in the disaster.
@@ -56,7 +56,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
vectorizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
index 094a966e350864..a020dbea669f36 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
@@ -29,7 +29,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Usage of {@link ImputerTrainer} to fill missed data ({@code Double.NaN}) values in the chosen columns.
@@ -66,7 +66,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
vectorizer
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java
index 68b05a46d8fd5a..c97ee387310e2b 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial.java
@@ -31,7 +31,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Let's add two categorial features "sex", "embarked" to predict more precisely than in {@link Step_1_Read_and_Learn}.
@@ -80,7 +80,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
imputingPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java
index 206d2dcaa06b5b..1355979d228faa 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_3_Categorial_with_One_Hot_Encoder.java
@@ -31,7 +31,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Let's add two categorial features "sex", "embarked" to predict more precisely than in {@link
@@ -83,7 +83,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
imputingPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java
index 1d85a14ac74c22..f4763a1f2b66b4 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_4_Add_age_fare.java
@@ -31,7 +31,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* Add yet two numerical features "age", "fare" to improve our model over {@link Step_3_Categorial}.
@@ -79,7 +79,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
imputingPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java
index dfb6de0c7d5434..05d0137e05b2eb 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_5_Scaling.java
@@ -33,7 +33,7 @@
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* {@link MinMaxScalerTrainer} and {@link NormalizationTrainer} are used in this example due to different values
@@ -97,7 +97,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
normalizationPreprocessor
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java
index e104c510b348a4..a60a8bac9812bf 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_7_Split_train_test.java
@@ -35,7 +35,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* The highest accuracy in the previous example ({@link Step_6_KNN}) is the result of
@@ -103,7 +103,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
- DecisionTreeNode mdl = trainer.fit(
+ DecisionTreeModel mdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java
index 0da797d06b55f2..20f4a7287b7bd2 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV.java
@@ -38,7 +38,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation will be used in this example.
@@ -126,7 +126,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer
= new DecisionTreeClassificationTrainer(maxDeep, 0);
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
double[] scores = scoreCalculator
@@ -167,7 +167,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(bestMaxDeep, 0);
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java
index 5b6271414541e9..963e1b7fed1b0e 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid.java
@@ -40,7 +40,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -119,7 +119,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
@@ -156,7 +156,7 @@ public static void main(String[] args) {
-> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java
index 6be849624382ae..1aa2d576946cf5 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_8_CV_with_Param_Grid_and_pipeline.java
@@ -36,7 +36,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -91,7 +91,7 @@ public static void main(String[] args) {
// Tune hyper-parameters with K-fold Cross-Validation on the split training set.
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java
index d7e2f27aad9009..c489fc962bba77 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_13_RandomSearch.java
@@ -42,7 +42,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -123,7 +123,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
@@ -166,7 +166,7 @@ public static void main(String[] args) {
-> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java
index 017f123674494c..b63bf9643be632 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_14_Parallel_Brute_Force_Search.java
@@ -45,7 +45,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -126,7 +126,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
@@ -168,7 +168,7 @@ public static void main(String[] args) {
-> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java
index 3a3e9e8cdddfb5..ac6c1eb3c988a9 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_15_Parallel_Random_Search.java
@@ -45,7 +45,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -125,7 +125,7 @@ public static void main(String[] args) {
// Tune hyper-parameters with K-fold Cross-Validation on the split training set.
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
@@ -171,7 +171,7 @@ public static void main(String[] args) {
-> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java
index bee51e4b1e97f9..408eb48289c212 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_16_Genetic_Programming_Search.java
@@ -42,7 +42,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -123,7 +123,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
@@ -162,7 +162,7 @@ public static void main(String[] args) {
-> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java
index 34a8158dec6305..a9d39bd3092199 100644
--- a/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java
+++ b/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/hyperparametertuning/Step_17_Parallel_Genetic_Programming_Search.java
@@ -45,7 +45,7 @@
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
-import org.apache.ignite.ml.tree.DecisionTreeNode;
+import org.apache.ignite.ml.tree.DecisionTreeModel;
/**
* To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
@@ -126,7 +126,7 @@ public static void main(String[] args) {
DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
- CrossValidation scoreCalculator
+ CrossValidation scoreCalculator
= new CrossValidation<>();
ParamGrid paramGrid = new ParamGrid()
@@ -168,7 +168,7 @@ public static void main(String[] args) {
-> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
// Train decision tree model.
- DecisionTreeNode bestMdl = trainer.fit(
+ DecisionTreeModel bestMdl = trainer.fit(
ignite,
dataCache,
split.getTrainFilter(),
diff --git a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientAbstractBenchmark.java b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientAbstractBenchmark.java
new file mode 100644
index 00000000000000..6b6dc53dffd545
--- /dev/null
+++ b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientAbstractBenchmark.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.benchmarks.jmh.thin;
+
+import java.util.stream.IntStream;
+
+import org.apache.ignite.Ignite;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.client.ClientCache;
+import org.apache.ignite.client.IgniteClient;
+import org.apache.ignite.configuration.ClientConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.benchmarks.jmh.JmhAbstractBenchmark;
+import org.apache.ignite.internal.util.typedef.internal.A;
+import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+
+/**
+ * Base class for thin client benchmarks.
+ */
+@State(Scope.Benchmark)
+public abstract class JmhThinClientAbstractBenchmark extends JmhAbstractBenchmark {
+ /** Property: nodes count. */
+ protected static final String PROP_DATA_NODES = "ignite.jmh.thin.dataNodes";
+
+ /** Default amount of nodes. */
+ protected static final int DFLT_DATA_NODES = 4;
+
+ /** Items count. */
+ protected static final int CNT = 1000;
+
+ /** Cache value. */
+ protected static final byte[] PAYLOAD = new byte[1000];
+
+ /** IP finder shared across nodes. */
+ private static final TcpDiscoveryVmIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);
+
+ /** Default cache name. */
+ private static final String DEFAULT_CACHE_NAME = "default";
+
+ /** Target node. */
+ protected Ignite node;
+
+ /** Target cache. */
+ protected ClientCache cache;
+
+ /** Thin client. */
+ protected IgniteClient client;
+
+    /**
+     * Setup routine: starts the server nodes, connects a partition-aware thin client,
+     * and preloads test data. Child classes must invoke this method first.
+     */
+ @Setup
+ public void setup() {
+ System.out.println();
+ System.out.println("--------------------");
+ System.out.println("IGNITE BENCHMARK INFO: ");
+ System.out.println("\tdata nodes: " + intProperty(PROP_DATA_NODES, DFLT_DATA_NODES));
+ System.out.println("--------------------");
+ System.out.println();
+
+ int nodesCnt = intProperty(PROP_DATA_NODES, DFLT_DATA_NODES);
+
+ A.ensure(nodesCnt >= 1, "nodesCnt >= 1");
+
+ node = Ignition.start(configuration("node0"));
+
+ for (int i = 1; i < nodesCnt; i++)
+ Ignition.start(configuration("node" + i));
+
+ String[] addrs = IntStream
+ .range(10800, 10800 + nodesCnt)
+ .mapToObj(p -> "127.0.0.1:" + p)
+ .toArray(String[]::new);
+
+ ClientConfiguration cfg = new ClientConfiguration()
+ .setAddresses(addrs)
+ .setPartitionAwarenessEnabled(true);
+
+ client = Ignition.startClient(cfg);
+
+ cache = client.getOrCreateCache(DEFAULT_CACHE_NAME);
+
+ System.out.println("Loading test data...");
+
+ for (int i = 0; i < CNT; i++)
+ cache.put(i, PAYLOAD);
+
+ System.out.println("Test data loaded: " + CNT);
+ }
+
+    /**
+     * Tear down routine: closes the thin client and stops all Ignite nodes.
+     * @throws Exception If failed.
+     */
+ @TearDown
+ public void tearDown() throws Exception {
+ client.close();
+ Ignition.stopAll(true);
+ }
+
+ /**
+ * Create Ignite configuration.
+ *
+ * @param igniteInstanceName Ignite instance name.
+ * @return Configuration.
+ */
+ protected IgniteConfiguration configuration(String igniteInstanceName) {
+
+ return new IgniteConfiguration()
+ .setIgniteInstanceName(igniteInstanceName)
+ .setLocalHost("127.0.0.1")
+ .setDiscoverySpi(new TcpDiscoverySpi().setIpFinder(IP_FINDER));
+ }
+}
diff --git a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientCacheBenchmark.java b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientCacheBenchmark.java
new file mode 100644
index 00000000000000..88e6a87171d9c6
--- /dev/null
+++ b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/thin/JmhThinClientCacheBenchmark.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.benchmarks.jmh.thin;
+
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.ignite.internal.benchmarks.jmh.runner.JmhIdeBenchmarkRunner;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Mode;
+
+/**
+ * Thin client cache benchmark.
+ *
+ * Results on i7-9700K, Ubuntu 20.04.1, JDK 1.8.0_275:
+ * Benchmark Mode Cnt Score Error Units
+ * JmhThinClientCacheBenchmark.get thrpt 10 92501.557 ± 1380.384 ops/s
+ * JmhThinClientCacheBenchmark.put thrpt 10 82907.446 ± 7572.537 ops/s
+ *
+ * JmhThinClientCacheBenchmark.get avgt 10 41.505 ± 1.018 us/op
+ * JmhThinClientCacheBenchmark.put avgt 10 44.623 ± 0.779 us/op
+ */
+public class JmhThinClientCacheBenchmark extends JmhThinClientAbstractBenchmark {
+ /**
+ * Cache put benchmark.
+ */
+ @Benchmark
+ public void put() {
+ int key = ThreadLocalRandom.current().nextInt(CNT);
+
+ cache.put(key, PAYLOAD);
+ }
+
+ /**
+ * Cache get benchmark.
+ */
+ @Benchmark
+ public Object get() {
+ int key = ThreadLocalRandom.current().nextInt(CNT);
+
+ return cache.get(key);
+ }
+
+ /**
+ * Run benchmarks.
+ *
+ * @param args Arguments.
+ * @throws Exception If failed.
+ */
+ public static void main(String[] args) throws Exception {
+ JmhIdeBenchmarkRunner runner = JmhIdeBenchmarkRunner.create()
+ .forks(1)
+ .threads(4)
+ .benchmarks(JmhThinClientCacheBenchmark.class.getSimpleName())
+ .jvmArguments("-Xms4g", "-Xmx4g");
+
+ runner
+ .benchmarkModes(Mode.Throughput)
+ .run();
+
+ runner
+ .benchmarkModes(Mode.AverageTime)
+ .outputTimeUnit(TimeUnit.MICROSECONDS)
+ .run();
+ }
+}
diff --git a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/tree/BPlusTreeBenchmark.java b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/tree/BPlusTreeBenchmark.java
index 7a35430b36ea5a..af843cbbd5f74d 100644
--- a/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/tree/BPlusTreeBenchmark.java
+++ b/modules/benchmarks/src/main/java/org/apache/ignite/internal/benchmarks/jmh/tree/BPlusTreeBenchmark.java
@@ -103,6 +103,11 @@ private static class FakeReuseList implements ReuseList {
return pageId == null ? 0L : pageId;
}
+ /** {@inheritDoc} */
+ @Override public long initRecycledPage(long pageId, byte flag, PageIO initIO) throws IgniteCheckedException {
+ return pageId;
+ }
+
/** {@inheritDoc} */
@Override public long recycledPagesCount() throws IgniteCheckedException {
return deque.size();
@@ -186,6 +191,7 @@ protected static class TestTree extends BPlusTree {
reuseList,
new IOVersions<>(new LongInnerIO()),
new IOVersions<>(new LongLeafIO()),
+ PageIdAllocator.FLAG_IDX,
null,
null
);
diff --git a/modules/clients/src/test/java/org/apache/ignite/internal/client/rest/GridProbeCommandTest.java b/modules/clients/src/test/java/org/apache/ignite/internal/client/rest/GridProbeCommandTest.java
new file mode 100644
index 00000000000000..af09544db0d57a
--- /dev/null
+++ b/modules/clients/src/test/java/org/apache/ignite/internal/client/rest/GridProbeCommandTest.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.client.rest;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.ignite.configuration.ConnectorConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.IgniteInternalFuture;
+import org.apache.ignite.internal.processors.rest.GridRestCommand;
+import org.apache.ignite.internal.processors.rest.GridRestResponse;
+import org.apache.ignite.internal.processors.rest.handlers.GridRestCommandHandler;
+import org.apache.ignite.internal.processors.rest.handlers.probe.GridProbeCommandHandler;
+import org.apache.ignite.internal.processors.rest.request.GridRestCacheRequest;
+import org.apache.ignite.plugin.AbstractTestPluginProvider;
+import org.apache.ignite.plugin.PluginProvider;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Test;
+
+/**
+ * Test that the REST probe command responds correctly both before and after the kernal has started.
+ */
+public class GridProbeCommandTest extends GridCommonAbstractTest {
+ /** */
+ private static final int JETTY_PORT = 8080;
+
+ /** */
+ private CountDownLatch triggerRestCmdLatch = new CountDownLatch(1);
+
+ /** */
+ private CountDownLatch triggerPluginStartLatch = new CountDownLatch(1);
+
+ /** */
+ public static Map executeProbeRestRequest() throws IOException {
+ HttpURLConnection conn = (HttpURLConnection)(new URL("http://localhost:" + JETTY_PORT + "/ignite?cmd=probe").openConnection());
+ conn.connect();
+
+ boolean isHTTP_OK = conn.getResponseCode() == HttpURLConnection.HTTP_OK;
+
+ Map restResponse = null;
+
+ try (InputStreamReader streamReader = new InputStreamReader(isHTTP_OK ? conn.getInputStream() : conn.getErrorStream())) {
+
+ ObjectMapper objMapper = new ObjectMapper();
+ restResponse = objMapper.readValue(streamReader,
+ new TypeReference